From 626dbe73248a40532d25ca8ab83c49537ee7efb7 Mon Sep 17 00:00:00 2001
From: sdcb
Date: Fri, 6 Dec 2024 22:26:01 +0800
Subject: [PATCH 1/5] update model provider name

---
 src/BE/DB/Enums/DBModelProvider.cs                   | 8 ++++----
 src/BE/Services/Conversations/ConversationFactory.cs | 8 ++++----
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/src/BE/DB/Enums/DBModelProvider.cs b/src/BE/DB/Enums/DBModelProvider.cs
index 9211a9cf..39df10ef 100644
--- a/src/BE/DB/Enums/DBModelProvider.cs
+++ b/src/BE/DB/Enums/DBModelProvider.cs
@@ -3,13 +3,13 @@
 public enum DBModelProvider
 {
     Test = 0,
-    Azure = 1,
+    AzureOpenAI = 1,
     HunYuan = 2,
     LingYi = 3,
     Moonshot = 4,
     OpenAI = 5,
-    QianFan = 6,
-    QianWen = 7,
-    Spark = 8,
+    WenXinQianFan = 6,
+    AliyunDashscope = 7,
+    Sparkdesk = 8,
     ZhiPuAI = 9
 }
diff --git a/src/BE/Services/Conversations/ConversationFactory.cs b/src/BE/Services/Conversations/ConversationFactory.cs
index 1410633d..0b5d4cc0 100644
--- a/src/BE/Services/Conversations/ConversationFactory.cs
+++ b/src/BE/Services/Conversations/ConversationFactory.cs
@@ -21,13 +21,13 @@ public ConversationService CreateConversationService(Model model)
         {
             DBModelProvider.Test => new TestConversationService(model),
             DBModelProvider.OpenAI => new OpenAIConversationService(model),
-            DBModelProvider.Azure => new AzureConversationService(model),
-            DBModelProvider.QianFan => new QianFanConversationService(model),
-            DBModelProvider.QianWen => new DashScopeConversationService(model),
+            DBModelProvider.AzureOpenAI => new AzureConversationService(model),
+            DBModelProvider.WenXinQianFan => new QianFanConversationService(model),
+            DBModelProvider.AliyunDashscope => new DashScopeConversationService(model),
             DBModelProvider.ZhiPuAI => new GLMConversationService(model),
             DBModelProvider.Moonshot => new KimiConversationService(model),
             DBModelProvider.HunYuan => new HunyuanConversationService(model),
-            DBModelProvider.Spark => throw new NotImplementedException("Spark model is not implemented"),
+            DBModelProvider.Sparkdesk => throw new NotImplementedException("Spark model is not implemented"),
             DBModelProvider.LingYi => throw new NotImplementedException("LingYi model is not implemented"),
             _ => throw new NotSupportedException($"Unknown model provider: {modelProvider}")
         };

From 0111640cfdc35275a0339b1d74f9a09d8b0ba642 Mon Sep 17 00:00:00 2001
From: sdcb
Date: Fri, 6 Dec 2024 22:37:46 +0800
Subject: [PATCH 2/5] major renaming

---
 .../ChatCompletionOptionsTests.cs | 2 +-
 .../AdminMessage/AdminMessageController.cs | 4 +-
 .../AdminMessage/Dtos/AdminMessageRoot.cs | 6 +--
 .../AdminModels/AdminModelsController.cs | 4 +-
 .../ModelKeys/Dtos/AutoCreateModelResult.cs | 2 +-
 .../Admin/ModelKeys/ModelKeysController.cs | 4 +-
 .../ChatController.cs} | 16 +++----
 .../Dtos/ChatRequest.cs} | 4 +-
 .../Dtos/MessageLiteDto.cs | 4 +-
 .../Dtos/SseResponseLine.cs | 2 +-
 .../InsufficientBalanceException.cs | 4 +-
 .../UserModelBalanceCalculator.cs | 2 +-
 .../Chats/Messages/Dtos/MessageDto.cs | 2 +-
 .../Chats/Messages/MessagesController.cs | 2 +-
 .../Chats/Prompts/PromptsController.cs | 6 +--
 .../Dtos/ChatsResponse.cs | 12 +++---
 .../Dtos/CreateChatsRequest.cs | 2 +-
 .../Dtos/UpdateChatsRequest.cs | 2 +-
 .../UserChatsController.cs | 8 ++--
 .../OpenAICompatibleController.cs | 10 ++---
 src/BE/Program.cs | 4 +-
 .../CcoWrapper.cs | 2 +-
 .../ChatFactory.cs} | 42 +++++++++----------
 .../ChatService.cs} | 10 ++---
 .../ChatServiceExtensions.cs} | 8 ++--
 .../DBChatRole.cs | 2 +-
 .../DBFinishReason.cs | 2 +-
 .../Dtos/ChatSegment.cs | 2 +-
 .../Dtos/ChatTokenUsage.cs | 2 
+- .../Dtos/InternalChatSegment.cs | 2 +- .../ChatCompletionOptionsExtensions.cs | 2 +- .../ChatMessageContentPartExtensions.cs | 2 +- .../Extensions/ChatMessageExtensions.cs | 2 +- .../Azure/AzureChatService.cs} | 6 +-- .../Implementations/Azure/JsonAzureConfig.cs | 2 +- .../Azure/JsonAzureModelConfig.cs | 2 +- .../DashScope/DashScopeChatService.cs} | 10 ++--- .../DashScope/JsonDashScopeConfig.cs | 2 +- .../DashScope/JsonDashScopeModelConfig.cs | 2 +- .../Implementations/GLM/GLMChatService.cs | 8 ++++ .../Implementations/GLM/JsonGLMModelConfig.cs | 2 +- .../Hunyuan/HunyuanChatService.cs} | 8 ++-- .../Hunyuan/HuyuanChatSegment.cs | 2 +- .../Hunyuan/JsonHunyuanKeyConfig.cs | 2 +- .../Hunyuan/JsonHunyuanModelConfig.cs | 2 +- .../Hunyuan/UnixDateTimeOffsetConverter.cs | 2 +- .../Kimi/JsonKimiModelConfig.cs | 2 +- .../Implementations/Kimi/KimiChatService.cs | 6 +++ .../OpenAI/JsonOpenAIModelConfig.cs | 2 +- .../OpenAI/OpenAIChatService.cs} | 10 ++--- .../QianFan/JsonQianFanApiConfig.cs | 2 +- .../QianFan/JsonQianFanModelConfig.cs | 2 +- .../QianFan/QianFanChatService.cs} | 10 ++--- .../Implementations/Test/TestChatService.cs} | 6 +-- .../InChatContext.cs | 6 +-- .../ModelValidateResult.cs | 2 +- .../GLM/GLMConversationService.cs | 8 ---- .../Kimi/KimiConversationService.cs | 6 --- 58 files changed, 146 insertions(+), 146 deletions(-) rename src/BE/Controllers/Chats/{Conversations/ConversationController.cs => Chats/ChatController.cs} (94%) rename src/BE/Controllers/Chats/{Conversations/Dtos/ConversationRequest.cs => Chats/Dtos/ChatRequest.cs} (86%) rename src/BE/Controllers/Chats/{Conversations => Chats}/Dtos/MessageLiteDto.cs (92%) rename src/BE/Controllers/Chats/{Conversations => Chats}/Dtos/SseResponseLine.cs (80%) rename src/BE/Controllers/Chats/{Conversations => Chats}/InsufficientBalanceException.cs (89%) rename src/BE/Controllers/Chats/{Conversations => Chats}/UserModelBalanceCalculator.cs (98%) rename src/BE/Controllers/Chats/{Chats => UserChats}/Dtos/ChatsResponse.cs (88%) rename src/BE/Controllers/Chats/{Chats => UserChats}/Dtos/CreateChatsRequest.cs (75%) rename src/BE/Controllers/Chats/{Chats => UserChats}/Dtos/UpdateChatsRequest.cs (95%) rename src/BE/Controllers/Chats/{Chats => UserChats}/UserChatsController.cs (96%) rename src/BE/Services/{Conversations => ChatServices}/CcoWrapper.cs (97%) rename src/BE/Services/{Conversations/ConversationFactory.cs => ChatServices/ChatFactory.cs} (50%) rename src/BE/Services/{Conversations/ConversationService.cs => ChatServices/ChatService.cs} (89%) rename src/BE/Services/{Conversations/ConversationServiceExtensions.cs => ChatServices/ChatServiceExtensions.cs} (95%) rename src/BE/Services/{Conversations => ChatServices}/DBChatRole.cs (62%) rename src/BE/Services/{Conversations => ChatServices}/DBFinishReason.cs (87%) rename src/BE/Services/{Conversations => ChatServices}/Dtos/ChatSegment.cs (95%) rename src/BE/Services/{Conversations => ChatServices}/Dtos/ChatTokenUsage.cs (87%) rename src/BE/Services/{Conversations => ChatServices}/Dtos/InternalChatSegment.cs (98%) rename src/BE/Services/{Conversations => ChatServices}/Extensions/ChatCompletionOptionsExtensions.cs (97%) rename src/BE/Services/{Conversations => ChatServices}/Extensions/ChatMessageContentPartExtensions.cs (92%) rename src/BE/Services/{Conversations => ChatServices}/Extensions/ChatMessageExtensions.cs (85%) rename src/BE/Services/{Conversations/Implementations/Azure/AzureConversationService.cs => ChatServices/Implementations/Azure/AzureChatService.cs} (70%) rename 
src/BE/Services/{Conversations => ChatServices}/Implementations/Azure/JsonAzureConfig.cs (86%) rename src/BE/Services/{Conversations => ChatServices}/Implementations/Azure/JsonAzureModelConfig.cs (86%) rename src/BE/Services/{Conversations/Implementations/DashScope/DashScopeConversationService.cs => ChatServices/Implementations/DashScope/DashScopeChatService.cs} (94%) rename src/BE/Services/{Conversations => ChatServices}/Implementations/DashScope/JsonDashScopeConfig.cs (77%) rename src/BE/Services/{Conversations => ChatServices}/Implementations/DashScope/JsonDashScopeModelConfig.cs (86%) create mode 100644 src/BE/Services/ChatServices/Implementations/GLM/GLMChatService.cs rename src/BE/Services/{Conversations => ChatServices}/Implementations/GLM/JsonGLMModelConfig.cs (84%) rename src/BE/Services/{Conversations/Implementations/Hunyuan/HunyuanConversationService.cs => ChatServices/Implementations/Hunyuan/HunyuanChatService.cs} (95%) rename src/BE/Services/{Conversations => ChatServices}/Implementations/Hunyuan/HuyuanChatSegment.cs (94%) rename src/BE/Services/{Conversations => ChatServices}/Implementations/Hunyuan/JsonHunyuanKeyConfig.cs (79%) rename src/BE/Services/{Conversations => ChatServices}/Implementations/Hunyuan/JsonHunyuanModelConfig.cs (83%) rename src/BE/Services/{Conversations => ChatServices}/Implementations/Hunyuan/UnixDateTimeOffsetConverter.cs (91%) rename src/BE/Services/{Conversations => ChatServices}/Implementations/Kimi/JsonKimiModelConfig.cs (79%) create mode 100644 src/BE/Services/ChatServices/Implementations/Kimi/KimiChatService.cs rename src/BE/Services/{Conversations => ChatServices}/Implementations/OpenAI/JsonOpenAIModelConfig.cs (83%) rename src/BE/Services/{Conversations/Implementations/OpenAI/OpenAIConversationService.cs => ChatServices/Implementations/OpenAI/OpenAIChatService.cs} (86%) rename src/BE/Services/{Conversations => ChatServices}/Implementations/QianFan/JsonQianFanApiConfig.cs (78%) rename src/BE/Services/{Conversations => ChatServices}/Implementations/QianFan/JsonQianFanModelConfig.cs (86%) rename src/BE/Services/{Conversations/Implementations/QianFan/QianFanConversationService.cs => ChatServices/Implementations/QianFan/QianFanChatService.cs} (92%) rename src/BE/Services/{Conversations/Implementations/Test/TestConversationService.cs => ChatServices/Implementations/Test/TestChatService.cs} (95%) rename src/BE/Services/{Conversations => ChatServices}/InChatContext.cs (97%) rename src/BE/Services/{Conversations => ChatServices}/ModelValidateResult.cs (93%) delete mode 100644 src/BE/Services/Conversations/Implementations/GLM/GLMConversationService.cs delete mode 100644 src/BE/Services/Conversations/Implementations/Kimi/KimiConversationService.cs diff --git a/src/BE.Tests/Services/Conversations/ChatCompletionOptionsTests.cs b/src/BE.Tests/Services/Conversations/ChatCompletionOptionsTests.cs index 24909c7d..2a2ad9fc 100644 --- a/src/BE.Tests/Services/Conversations/ChatCompletionOptionsTests.cs +++ b/src/BE.Tests/Services/Conversations/ChatCompletionOptionsTests.cs @@ -1,4 +1,4 @@ -using Chats.BE.Services.Conversations.Extensions; +using Chats.BE.Services.ChatServices.Extensions; using OpenAI.Chat; using System.Runtime.CompilerServices; diff --git a/src/BE/Controllers/Admin/AdminMessage/AdminMessageController.cs b/src/BE/Controllers/Admin/AdminMessage/AdminMessageController.cs index 52cc0627..dc75834c 100644 --- a/src/BE/Controllers/Admin/AdminMessage/AdminMessageController.cs +++ b/src/BE/Controllers/Admin/AdminMessage/AdminMessageController.cs @@ -1,12 
+1,12 @@ using Chats.BE.Controllers.Admin.AdminMessage.Dtos; using Chats.BE.Controllers.Admin.Common; -using Chats.BE.Controllers.Chats.Conversations.Dtos; +using Chats.BE.Controllers.Chats.Chats.Dtos; using Chats.BE.Controllers.Common.Dtos; using Chats.BE.DB; using Chats.BE.DB.Enums; using Chats.BE.DB.Jsons; using Chats.BE.Infrastructure; -using Chats.BE.Services.Conversations; +using Chats.BE.Services.ChatServices; using Chats.BE.Services.FileServices; using Chats.BE.Services.UrlEncryption; using Microsoft.AspNetCore.Mvc; diff --git a/src/BE/Controllers/Admin/AdminMessage/Dtos/AdminMessageRoot.cs b/src/BE/Controllers/Admin/AdminMessage/Dtos/AdminMessageRoot.cs index 518d1b1f..789d007f 100644 --- a/src/BE/Controllers/Admin/AdminMessage/Dtos/AdminMessageRoot.cs +++ b/src/BE/Controllers/Admin/AdminMessage/Dtos/AdminMessageRoot.cs @@ -1,7 +1,7 @@ -using Chats.BE.Controllers.Chats.Conversations.Dtos; +using Chats.BE.Controllers.Chats.Chats.Dtos; using Chats.BE.Controllers.Chats.Messages.Dtos; using Chats.BE.DB; -using Chats.BE.Services.Conversations; +using Chats.BE.Services.ChatServices; using Chats.BE.Services.FileServices; using Chats.BE.Services.UrlEncryption; using System.Text.Json.Serialization; @@ -39,7 +39,7 @@ public AdminMessageRoot ToDto(AdminMessageBasicItem[] messages) { Name = Name, ModelName = ModelName, - ModelTemperature = Temperature ?? ConversationService.DefaultTemperature, + ModelTemperature = Temperature ?? ChatService.DefaultTemperature, ModelPrompt = messages.FirstOrDefault(x => x.Role.Equals(DBChatRole.System.ToString(), StringComparison.OrdinalIgnoreCase))?.Content.Text, Messages = messages.Where(x => !x.Role.Equals(DBChatRole.System.ToString(), StringComparison.OrdinalIgnoreCase)).ToArray(), }; diff --git a/src/BE/Controllers/Admin/AdminModels/AdminModelsController.cs b/src/BE/Controllers/Admin/AdminModels/AdminModelsController.cs index b9dec66b..1605ebc1 100644 --- a/src/BE/Controllers/Admin/AdminModels/AdminModelsController.cs +++ b/src/BE/Controllers/Admin/AdminModels/AdminModelsController.cs @@ -5,7 +5,7 @@ using Chats.BE.DB.Jsons; using Chats.BE.Infrastructure; using Chats.BE.Services; -using Chats.BE.Services.Conversations; +using Chats.BE.Services.ChatServices; using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc.ModelBinding; using Microsoft.EntityFrameworkCore; @@ -182,7 +182,7 @@ public async Task DeleteModel(short modelId, CancellationToken can [HttpPost("models/validate")] public async Task> ValidateModel( [FromBody] ValidateModelRequest req, - [FromServices] ConversationFactory conversationFactory, + [FromServices] ChatFactory conversationFactory, CancellationToken cancellationToken) { ModelKey? 
modelKey = await db.ModelKeys diff --git a/src/BE/Controllers/Admin/ModelKeys/Dtos/AutoCreateModelResult.cs b/src/BE/Controllers/Admin/ModelKeys/Dtos/AutoCreateModelResult.cs index 1e53b465..9e976dbc 100644 --- a/src/BE/Controllers/Admin/ModelKeys/Dtos/AutoCreateModelResult.cs +++ b/src/BE/Controllers/Admin/ModelKeys/Dtos/AutoCreateModelResult.cs @@ -1,5 +1,5 @@ using Chats.BE.DB; -using Chats.BE.Services.Conversations; +using Chats.BE.Services.ChatServices; using System.Text.Json.Serialization; namespace Chats.BE.Controllers.Admin.ModelKeys.Dtos; diff --git a/src/BE/Controllers/Admin/ModelKeys/ModelKeysController.cs b/src/BE/Controllers/Admin/ModelKeys/ModelKeysController.cs index 7671d926..8a01c9cb 100644 --- a/src/BE/Controllers/Admin/ModelKeys/ModelKeysController.cs +++ b/src/BE/Controllers/Admin/ModelKeys/ModelKeysController.cs @@ -3,7 +3,7 @@ using Chats.BE.Controllers.Common; using Chats.BE.DB; using Chats.BE.Services.Common; -using Chats.BE.Services.Conversations; +using Chats.BE.Services.ChatServices; using Microsoft.AspNetCore.Mvc; using Microsoft.EntityFrameworkCore; @@ -115,7 +115,7 @@ public async Task DeleteModelKey(short modelKeyId, CancellationTok } [HttpPost("{modelKeyId:int}/auto-create-models")] - public async Task> AutoCreateModels(short modelKeyId, [FromServices] ConversationFactory conversationFactory, CancellationToken cancellationToken) + public async Task> AutoCreateModels(short modelKeyId, [FromServices] ChatFactory conversationFactory, CancellationToken cancellationToken) { ModelKey? modelKey = await db .ModelKeys diff --git a/src/BE/Controllers/Chats/Conversations/ConversationController.cs b/src/BE/Controllers/Chats/Chats/ChatController.cs similarity index 94% rename from src/BE/Controllers/Chats/Conversations/ConversationController.cs rename to src/BE/Controllers/Chats/Chats/ChatController.cs index 3c38ec5a..d9734cfe 100644 --- a/src/BE/Controllers/Chats/Conversations/ConversationController.cs +++ b/src/BE/Controllers/Chats/Chats/ChatController.cs @@ -1,11 +1,11 @@ -using Chats.BE.Controllers.Chats.Conversations.Dtos; +using Chats.BE.Controllers.Chats.Chats.Dtos; using Chats.BE.Controllers.Common; using Chats.BE.DB; using Chats.BE.DB.Jsons; using Chats.BE.Infrastructure; using Chats.BE.Services; -using Chats.BE.Services.Conversations; -using Chats.BE.Services.Conversations.Dtos; +using Chats.BE.Services.ChatServices; +using Chats.BE.Services.ChatServices.Dtos; using Chats.BE.Services.FileServices; using Chats.BE.Services.UrlEncryption; using Microsoft.AspNetCore.Authorization; @@ -18,16 +18,16 @@ using TencentCloud.Common; using OpenAIChatMessage = OpenAI.Chat.ChatMessage; -namespace Chats.BE.Controllers.Chats.Conversations; +namespace Chats.BE.Controllers.Chats.Chats; [Route("api/chats"), Authorize] -public class ConversationController(ChatsDB db, CurrentUser currentUser, ILogger logger, IUrlEncryptionService idEncryption) : ControllerBase +public class ChatController(ChatsDB db, CurrentUser currentUser, ILogger logger, IUrlEncryptionService idEncryption) : ControllerBase { [HttpPost] public async Task StartConversationStreamed( - [FromBody] ConversationRequest request, + [FromBody] ChatRequest request, [FromServices] BalanceService balanceService, - [FromServices] ConversationFactory conversationFactory, + [FromServices] ChatFactory conversationFactory, [FromServices] UserModelManager userModelManager, [FromServices] ClientInfoManager clientInfoManager, [FromServices] FileUrlProvider fileDownloadUrlProvider, @@ -155,7 +155,7 @@ ..await 
GetMessageTree(existingMessages, messageId).ToAsyncEnumerable().SelectAw throw new InvalidModelException(request.ModelId.ToString()); } - using ConversationService s = conversationFactory.CreateConversationService(userModel.Model); + using ChatService s = conversationFactory.CreateConversationService(userModel.Model); ChatCompletionOptions cco = new() { Temperature = request.UserModelConfig.Temperature, diff --git a/src/BE/Controllers/Chats/Conversations/Dtos/ConversationRequest.cs b/src/BE/Controllers/Chats/Chats/Dtos/ChatRequest.cs similarity index 86% rename from src/BE/Controllers/Chats/Conversations/Dtos/ConversationRequest.cs rename to src/BE/Controllers/Chats/Chats/Dtos/ChatRequest.cs index 8626b1fc..1faacd83 100644 --- a/src/BE/Controllers/Chats/Conversations/Dtos/ConversationRequest.cs +++ b/src/BE/Controllers/Chats/Chats/Dtos/ChatRequest.cs @@ -2,9 +2,9 @@ using Chats.BE.DB.Jsons; using System.Text.Json.Serialization; -namespace Chats.BE.Controllers.Chats.Conversations.Dtos; +namespace Chats.BE.Controllers.Chats.Chats.Dtos; -public record ConversationRequest +public record ChatRequest { [JsonPropertyName("modelId")] public required short ModelId { get; init; } diff --git a/src/BE/Controllers/Chats/Conversations/Dtos/MessageLiteDto.cs b/src/BE/Controllers/Chats/Chats/Dtos/MessageLiteDto.cs similarity index 92% rename from src/BE/Controllers/Chats/Conversations/Dtos/MessageLiteDto.cs rename to src/BE/Controllers/Chats/Chats/Dtos/MessageLiteDto.cs index c6cb8b01..775b7c3f 100644 --- a/src/BE/Controllers/Chats/Conversations/Dtos/MessageLiteDto.cs +++ b/src/BE/Controllers/Chats/Chats/Dtos/MessageLiteDto.cs @@ -1,11 +1,11 @@ using Chats.BE.DB; using Chats.BE.DB.Enums; using Chats.BE.DB.Extensions; -using Chats.BE.Services.Conversations; +using Chats.BE.Services.ChatServices; using Chats.BE.Services.FileServices; using OpenAI.Chat; -namespace Chats.BE.Controllers.Chats.Conversations.Dtos; +namespace Chats.BE.Controllers.Chats.Chats.Dtos; public record MessageLiteDto { diff --git a/src/BE/Controllers/Chats/Conversations/Dtos/SseResponseLine.cs b/src/BE/Controllers/Chats/Chats/Dtos/SseResponseLine.cs similarity index 80% rename from src/BE/Controllers/Chats/Conversations/Dtos/SseResponseLine.cs rename to src/BE/Controllers/Chats/Chats/Dtos/SseResponseLine.cs index d9a792b7..a12074f1 100644 --- a/src/BE/Controllers/Chats/Conversations/Dtos/SseResponseLine.cs +++ b/src/BE/Controllers/Chats/Chats/Dtos/SseResponseLine.cs @@ -1,6 +1,6 @@ using System.Text.Json.Serialization; -namespace Chats.BE.Controllers.Chats.Conversations.Dtos; +namespace Chats.BE.Controllers.Chats.Chats.Dtos; public record SseResponseLine { diff --git a/src/BE/Controllers/Chats/Conversations/InsufficientBalanceException.cs b/src/BE/Controllers/Chats/Chats/InsufficientBalanceException.cs similarity index 89% rename from src/BE/Controllers/Chats/Conversations/InsufficientBalanceException.cs rename to src/BE/Controllers/Chats/Chats/InsufficientBalanceException.cs index db79f7a1..728e6cbb 100644 --- a/src/BE/Controllers/Chats/Conversations/InsufficientBalanceException.cs +++ b/src/BE/Controllers/Chats/Chats/InsufficientBalanceException.cs @@ -1,6 +1,6 @@ -using Chats.BE.Services.Conversations; +using Chats.BE.Services.ChatServices; -namespace Chats.BE.Controllers.Chats.Conversations; +namespace Chats.BE.Controllers.Chats.Chats; public abstract class ChatServiceException(DBFinishReason errorCode) : Exception { diff --git a/src/BE/Controllers/Chats/Conversations/UserModelBalanceCalculator.cs 
b/src/BE/Controllers/Chats/Chats/UserModelBalanceCalculator.cs similarity index 98% rename from src/BE/Controllers/Chats/Conversations/UserModelBalanceCalculator.cs rename to src/BE/Controllers/Chats/Chats/UserModelBalanceCalculator.cs index e7ad8c16..a9076d56 100644 --- a/src/BE/Controllers/Chats/Conversations/UserModelBalanceCalculator.cs +++ b/src/BE/Controllers/Chats/Chats/UserModelBalanceCalculator.cs @@ -1,7 +1,7 @@ using Chats.BE.DB; using Chats.BE.DB.Jsons; -namespace Chats.BE.Controllers.Chats.Conversations; +namespace Chats.BE.Controllers.Chats.Chats; public record UserModelBalanceCost(int Counts, int Tokens, decimal Balance, int CostCount, int CostTokens, decimal InputTokenPrice, decimal OutputTokenPrice) { diff --git a/src/BE/Controllers/Chats/Messages/Dtos/MessageDto.cs b/src/BE/Controllers/Chats/Messages/Dtos/MessageDto.cs index 8fdebf99..125c5405 100644 --- a/src/BE/Controllers/Chats/Messages/Dtos/MessageDto.cs +++ b/src/BE/Controllers/Chats/Messages/Dtos/MessageDto.cs @@ -1,6 +1,6 @@ using Chats.BE.DB; using Chats.BE.DB.Enums; -using Chats.BE.Services.Conversations; +using Chats.BE.Services.ChatServices; using Chats.BE.Services.FileServices; using Chats.BE.Services.UrlEncryption; using System.Text.Json.Serialization; diff --git a/src/BE/Controllers/Chats/Messages/MessagesController.cs b/src/BE/Controllers/Chats/Messages/MessagesController.cs index f14732b5..1c923eaa 100644 --- a/src/BE/Controllers/Chats/Messages/MessagesController.cs +++ b/src/BE/Controllers/Chats/Messages/MessagesController.cs @@ -2,7 +2,7 @@ using Chats.BE.DB; using Chats.BE.DB.Enums; using Chats.BE.Infrastructure; -using Chats.BE.Services.Conversations; +using Chats.BE.Services.ChatServices; using Chats.BE.Services.FileServices; using Chats.BE.Services.UrlEncryption; using Microsoft.AspNetCore.Authorization; diff --git a/src/BE/Controllers/Chats/Prompts/PromptsController.cs b/src/BE/Controllers/Chats/Prompts/PromptsController.cs index 65471883..498f0b0e 100644 --- a/src/BE/Controllers/Chats/Prompts/PromptsController.cs +++ b/src/BE/Controllers/Chats/Prompts/PromptsController.cs @@ -1,7 +1,7 @@ using Chats.BE.Controllers.Chats.Prompts.Dtos; using Chats.BE.DB; using Chats.BE.Infrastructure; -using Chats.BE.Services.Conversations; +using Chats.BE.Services.ChatServices; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using Microsoft.EntityFrameworkCore; @@ -97,8 +97,8 @@ public async Task> GetDefaultPrompt(CancellationToken ca IsSystem = consolidated.IsSystem, } : new PromptDto() { - Content = ConversationService.DefaultPrompt, - Temperature = ConversationService.DefaultTemperature, + Content = ChatService.DefaultPrompt, + Temperature = ChatService.DefaultTemperature, Id = -1, IsDefault = true, Name = "Default", diff --git a/src/BE/Controllers/Chats/Chats/Dtos/ChatsResponse.cs b/src/BE/Controllers/Chats/UserChats/Dtos/ChatsResponse.cs similarity index 88% rename from src/BE/Controllers/Chats/Chats/Dtos/ChatsResponse.cs rename to src/BE/Controllers/Chats/UserChats/Dtos/ChatsResponse.cs index 63683651..fed215eb 100644 --- a/src/BE/Controllers/Chats/Chats/Dtos/ChatsResponse.cs +++ b/src/BE/Controllers/Chats/UserChats/Dtos/ChatsResponse.cs @@ -1,11 +1,11 @@ using Chats.BE.DB; using Chats.BE.DB.Enums; using Chats.BE.DB.Jsons; -using Chats.BE.Services.Conversations; +using Chats.BE.Services.ChatServices; using Chats.BE.Services.UrlEncryption; using System.Text.Json.Serialization; -namespace Chats.BE.Controllers.Chats.Chats.Dtos; +namespace Chats.BE.Controllers.Chats.UserChats.Dtos; public 
record ChatsResponse { @@ -46,9 +46,9 @@ public static ChatsResponse FromDB(Chat chat, IUrlEncryptionService idEncryption DeploymentName = chat.Model.DeploymentName, EnableSearch = chat.Model.ModelReference.AllowSearch, MaxLength = chat.Model.ModelReference.MaxResponseTokens, - Temperature = ConversationService.DefaultTemperature, + Temperature = ChatService.DefaultTemperature, Version = chat.Model.ModelKey.ModelProvider.Name, - Prompt = ConversationService.DefaultPrompt, + Prompt = ChatService.DefaultPrompt, }, UserModelConfig = new JsonUserModelConfig { EnableSearch = chat.EnableSearch, Temperature = chat.Temperature }, IsShared = chat.IsShared, @@ -87,8 +87,8 @@ public ChatsResponse ToResponse(IUrlEncryptionService idEncryption) ModelName = ModelName, ModelConfig = new JsonModelConfig { - Prompt = ConversationService.DefaultPrompt, - Temperature = Temperature ?? ConversationService.DefaultTemperature, + Prompt = ChatService.DefaultPrompt, + Temperature = Temperature ?? ChatService.DefaultTemperature, }, UserModelConfig = UserModelConfig, IsShared = IsShared, diff --git a/src/BE/Controllers/Chats/Chats/Dtos/CreateChatsRequest.cs b/src/BE/Controllers/Chats/UserChats/Dtos/CreateChatsRequest.cs similarity index 75% rename from src/BE/Controllers/Chats/Chats/Dtos/CreateChatsRequest.cs rename to src/BE/Controllers/Chats/UserChats/Dtos/CreateChatsRequest.cs index 7861a20a..528503c2 100644 --- a/src/BE/Controllers/Chats/Chats/Dtos/CreateChatsRequest.cs +++ b/src/BE/Controllers/Chats/UserChats/Dtos/CreateChatsRequest.cs @@ -1,6 +1,6 @@ using System.Text.Json.Serialization; -namespace Chats.BE.Controllers.Chats.Chats.Dtos; +namespace Chats.BE.Controllers.Chats.UserChats.Dtos; public record CreateChatsRequest { diff --git a/src/BE/Controllers/Chats/Chats/Dtos/UpdateChatsRequest.cs b/src/BE/Controllers/Chats/UserChats/Dtos/UpdateChatsRequest.cs similarity index 95% rename from src/BE/Controllers/Chats/Chats/Dtos/UpdateChatsRequest.cs rename to src/BE/Controllers/Chats/UserChats/Dtos/UpdateChatsRequest.cs index 419f93a5..7b2d9204 100644 --- a/src/BE/Controllers/Chats/Chats/Dtos/UpdateChatsRequest.cs +++ b/src/BE/Controllers/Chats/UserChats/Dtos/UpdateChatsRequest.cs @@ -3,7 +3,7 @@ using System.Text.Json; using System.Text.Json.Serialization; -namespace Chats.BE.Controllers.Chats.Chats.Dtos; +namespace Chats.BE.Controllers.Chats.UserChats.Dtos; public class UpdateChatsRequest { diff --git a/src/BE/Controllers/Chats/Chats/UserChatsController.cs b/src/BE/Controllers/Chats/UserChats/UserChatsController.cs similarity index 96% rename from src/BE/Controllers/Chats/Chats/UserChatsController.cs rename to src/BE/Controllers/Chats/UserChats/UserChatsController.cs index 8c8f9727..fef28546 100644 --- a/src/BE/Controllers/Chats/Chats/UserChatsController.cs +++ b/src/BE/Controllers/Chats/UserChats/UserChatsController.cs @@ -1,4 +1,4 @@ -using Chats.BE.Controllers.Chats.Chats.Dtos; +using Chats.BE.Controllers.Chats.UserChats.Dtos; using Chats.BE.Controllers.Common; using Chats.BE.Controllers.Common.Dtos; using Chats.BE.DB; @@ -7,13 +7,13 @@ using Chats.BE.Infrastructure; using Chats.BE.Services; using Chats.BE.Services.Common; -using Chats.BE.Services.Conversations; +using Chats.BE.Services.ChatServices; using Chats.BE.Services.UrlEncryption; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using Microsoft.EntityFrameworkCore; -namespace Chats.BE.Controllers.Chats.Chats; +namespace Chats.BE.Controllers.Chats.UserChats; [Route("api/user/chats"), Authorize] public class 
UserChatsController(ChatsDB db, CurrentUser currentUser, IUrlEncryptionService idEncryption) : ControllerBase @@ -69,7 +69,7 @@ public async Task>> GetChats([FromQuery] ChatModelId = x.ModelId, ModelName = x.Model!.Name, EnableSearch = x.EnableSearch, - Temperature = ConversationService.DefaultTemperature, + Temperature = ChatService.DefaultTemperature, IsShared = x.IsShared, ModelProvider = (DBModelProvider)x.Model.ModelKey.ModelProviderId, UserModelConfig = new JsonUserModelConfig diff --git a/src/BE/Controllers/OpenAICompatible/OpenAICompatibleController.cs b/src/BE/Controllers/OpenAICompatible/OpenAICompatibleController.cs index 33aeceb9..1397b096 100644 --- a/src/BE/Controllers/OpenAICompatible/OpenAICompatibleController.cs +++ b/src/BE/Controllers/OpenAICompatible/OpenAICompatibleController.cs @@ -1,8 +1,8 @@ -using Chats.BE.Controllers.Chats.Conversations; +using Chats.BE.Controllers.Chats.Chats; using Chats.BE.DB; using Chats.BE.Services; -using Chats.BE.Services.Conversations; -using Chats.BE.Services.Conversations.Dtos; +using Chats.BE.Services.ChatServices; +using Chats.BE.Services.ChatServices.Dtos; using Chats.BE.Services.OpenAIApiKeySession; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; @@ -16,7 +16,7 @@ namespace Chats.BE.Controllers.OpenAICompatible; [Route("v1"), Authorize(AuthenticationSchemes = "OpenAIApiKey")] -public partial class OpenAICompatibleController(ChatsDB db, CurrentApiKey currentApiKey, ConversationFactory cf, UserModelManager userModelManager, ILogger logger, BalanceService balanceService) : ControllerBase +public partial class OpenAICompatibleController(ChatsDB db, CurrentApiKey currentApiKey, ChatFactory cf, UserModelManager userModelManager, ILogger logger, BalanceService balanceService) : ControllerBase { [HttpPost("chat/completions")] public async Task ChatCompletion([FromBody] JsonObject json, [FromServices] ClientInfoManager clientInfoManager, CancellationToken cancellationToken) @@ -36,7 +36,7 @@ public async Task ChatCompletion([FromBody] JsonObject json, [From if (userModel == null) return InvalidModel(cco.Model); Model cm = userModel.Model; - using ConversationService s = cf.CreateConversationService(cm); + using ChatService s = cf.CreateConversationService(cm); UserBalance userBalance = await db.UserBalances .Where(x => x.UserId == currentApiKey.User.Id) diff --git a/src/BE/Program.cs b/src/BE/Program.cs index 08f54c13..a90bc8fd 100644 --- a/src/BE/Program.cs +++ b/src/BE/Program.cs @@ -3,7 +3,7 @@ using Chats.BE.Infrastructure; using Chats.BE.Services; using Chats.BE.Services.Configs; -using Chats.BE.Services.Conversations; +using Chats.BE.Services.ChatServices; using Chats.BE.Services.UrlEncryption; using Chats.BE.Services.OpenAIApiKeySession; using Chats.BE.Services.Sessions; @@ -50,7 +50,7 @@ public static async Task Main(string[] args) builder.Services.AddSingleton(); builder.Services.AddScoped(); builder.Services.AddSingleton(); - builder.Services.AddSingleton(); + builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddScoped(); builder.Services.AddScoped(); diff --git a/src/BE/Services/Conversations/CcoWrapper.cs b/src/BE/Services/ChatServices/CcoWrapper.cs similarity index 97% rename from src/BE/Services/Conversations/CcoWrapper.cs rename to src/BE/Services/ChatServices/CcoWrapper.cs index f923168c..89160158 100644 --- a/src/BE/Services/Conversations/CcoWrapper.cs +++ b/src/BE/Services/ChatServices/CcoWrapper.cs @@ -3,7 +3,7 @@ using System.Text.Json; using 
System.Text.Json.Nodes; -namespace Chats.BE.Services.Conversations; +namespace Chats.BE.Services.ChatServices; public class CcoWrapper(JsonObject json) { diff --git a/src/BE/Services/Conversations/ConversationFactory.cs b/src/BE/Services/ChatServices/ChatFactory.cs similarity index 50% rename from src/BE/Services/Conversations/ConversationFactory.cs rename to src/BE/Services/ChatServices/ChatFactory.cs index 0b5d4cc0..0befd67a 100644 --- a/src/BE/Services/Conversations/ConversationFactory.cs +++ b/src/BE/Services/ChatServices/ChatFactory.cs @@ -1,32 +1,32 @@ using Chats.BE.DB; using Chats.BE.DB.Enums; -using Chats.BE.Services.Conversations.Implementations.Azure; -using Chats.BE.Services.Conversations.Implementations.DashScope; -using Chats.BE.Services.Conversations.Implementations.GLM; -using Chats.BE.Services.Conversations.Implementations.Hunyuan; -using Chats.BE.Services.Conversations.Implementations.Kimi; -using Chats.BE.Services.Conversations.Implementations.OpenAI; -using Chats.BE.Services.Conversations.Implementations.QianFan; -using Chats.BE.Services.Conversations.Implementations.Test; +using Chats.BE.Services.ChatServices.Implementations.Azure; +using Chats.BE.Services.ChatServices.Implementations.DashScope; +using Chats.BE.Services.ChatServices.Implementations.GLM; +using Chats.BE.Services.ChatServices.Implementations.Hunyuan; +using Chats.BE.Services.ChatServices.Implementations.Kimi; +using Chats.BE.Services.ChatServices.Implementations.OpenAI; +using Chats.BE.Services.ChatServices.Implementations.QianFan; +using Chats.BE.Services.ChatServices.Implementations.Test; using OpenAI.Chat; -namespace Chats.BE.Services.Conversations; +namespace Chats.BE.Services.ChatServices; -public class ConversationFactory(ILogger logger) +public class ChatFactory(ILogger logger) { - public ConversationService CreateConversationService(Model model) + public ChatService CreateConversationService(Model model) { DBModelProvider modelProvider = (DBModelProvider)model.ModelKey.ModelProviderId; - ConversationService cs = modelProvider switch + ChatService cs = modelProvider switch { - DBModelProvider.Test => new TestConversationService(model), - DBModelProvider.OpenAI => new OpenAIConversationService(model), - DBModelProvider.AzureOpenAI => new AzureConversationService(model), - DBModelProvider.WenXinQianFan => new QianFanConversationService(model), - DBModelProvider.AliyunDashscope => new DashScopeConversationService(model), - DBModelProvider.ZhiPuAI => new GLMConversationService(model), - DBModelProvider.Moonshot => new KimiConversationService(model), - DBModelProvider.HunYuan => new HunyuanConversationService(model), + DBModelProvider.Test => new TestChatService(model), + DBModelProvider.OpenAI => new OpenAIChatService(model), + DBModelProvider.AzureOpenAI => new AzureChatService(model), + DBModelProvider.WenXinQianFan => new QianFanChatService(model), + DBModelProvider.AliyunDashscope => new DashScopeChatService(model), + DBModelProvider.ZhiPuAI => new GLMChatService(model), + DBModelProvider.Moonshot => new KimiChatService(model), + DBModelProvider.HunYuan => new HunyuanChatService(model), DBModelProvider.Sparkdesk => throw new NotImplementedException("Spark model is not implemented"), DBModelProvider.LingYi => throw new NotImplementedException("LingYi model is not implemented"), _ => throw new NotSupportedException($"Unknown model provider: {modelProvider}") @@ -36,7 +36,7 @@ public ConversationService CreateConversationService(Model model) public async Task ValidateModel(ModelKey modelKey, 
ModelReference modelReference, string? deploymentName, CancellationToken cancellationToken) { - using ConversationService cs = CreateConversationService(new Model + using ChatService cs = CreateConversationService(new Model { ModelKey = modelKey, ModelReference = modelReference, diff --git a/src/BE/Services/Conversations/ConversationService.cs b/src/BE/Services/ChatServices/ChatService.cs similarity index 89% rename from src/BE/Services/Conversations/ConversationService.cs rename to src/BE/Services/ChatServices/ChatService.cs index a7c98aa3..83f1acd1 100644 --- a/src/BE/Services/Conversations/ConversationService.cs +++ b/src/BE/Services/ChatServices/ChatService.cs @@ -1,14 +1,14 @@ using Chats.BE.DB; -using Chats.BE.Services.Conversations.Dtos; +using Chats.BE.Services.ChatServices.Dtos; using Tokenizer = Microsoft.ML.Tokenizers.Tokenizer; using OpenAI.Chat; using System.Text; using Microsoft.ML.Tokenizers; -using Chats.BE.Services.Conversations.Extensions; +using Chats.BE.Services.ChatServices.Extensions; -namespace Chats.BE.Services.Conversations; +namespace Chats.BE.Services.ChatServices; -public abstract partial class ConversationService : IDisposable +public abstract partial class ChatService : IDisposable { public const float DefaultTemperature = 0.5f; public const string DefaultPrompt = "你是{{MODEL_NAME}},请仔细遵循用户指令并认真回复,当前日期: {{CURRENT_DATE}}"; @@ -16,7 +16,7 @@ public abstract partial class ConversationService : IDisposable internal protected Model Model { get; } internal protected Tokenizer Tokenizer { get; } - public ConversationService(Model model) + public ChatService(Model model) { Model = model; try diff --git a/src/BE/Services/Conversations/ConversationServiceExtensions.cs b/src/BE/Services/ChatServices/ChatServiceExtensions.cs similarity index 95% rename from src/BE/Services/Conversations/ConversationServiceExtensions.cs rename to src/BE/Services/ChatServices/ChatServiceExtensions.cs index 93174308..dc61f33f 100644 --- a/src/BE/Services/Conversations/ConversationServiceExtensions.cs +++ b/src/BE/Services/ChatServices/ChatServiceExtensions.cs @@ -1,11 +1,11 @@ -using Chats.BE.Services.Conversations.Dtos; -using Chats.BE.Services.Conversations.Extensions; +using Chats.BE.Services.ChatServices.Dtos; +using Chats.BE.Services.ChatServices.Extensions; using OpenAI.Chat; using System.Runtime.CompilerServices; -namespace Chats.BE.Services.Conversations; +namespace Chats.BE.Services.ChatServices; -public abstract partial class ConversationService +public abstract partial class ChatService { public async IAsyncEnumerable ChatStreamedFEProcessed(IReadOnlyList messages, ChatCompletionOptions options, [EnumeratorCancellation] CancellationToken cancellationToken) { diff --git a/src/BE/Services/Conversations/DBChatRole.cs b/src/BE/Services/ChatServices/DBChatRole.cs similarity index 62% rename from src/BE/Services/Conversations/DBChatRole.cs rename to src/BE/Services/ChatServices/DBChatRole.cs index f546fdb7..2b490202 100644 --- a/src/BE/Services/Conversations/DBChatRole.cs +++ b/src/BE/Services/ChatServices/DBChatRole.cs @@ -1,4 +1,4 @@ -namespace Chats.BE.Services.Conversations; +namespace Chats.BE.Services.ChatServices; public enum DBChatRole { diff --git a/src/BE/Services/Conversations/DBFinishReason.cs b/src/BE/Services/ChatServices/DBFinishReason.cs similarity index 87% rename from src/BE/Services/Conversations/DBFinishReason.cs rename to src/BE/Services/ChatServices/DBFinishReason.cs index 49f36a04..58ae1e7d 100644 --- a/src/BE/Services/Conversations/DBFinishReason.cs +++ 
b/src/BE/Services/ChatServices/DBFinishReason.cs @@ -1,4 +1,4 @@ -namespace Chats.BE.Services.Conversations; +namespace Chats.BE.Services.ChatServices; public enum DBFinishReason : byte { diff --git a/src/BE/Services/Conversations/Dtos/ChatSegment.cs b/src/BE/Services/ChatServices/Dtos/ChatSegment.cs similarity index 95% rename from src/BE/Services/Conversations/Dtos/ChatSegment.cs rename to src/BE/Services/ChatServices/Dtos/ChatSegment.cs index 8e1ab811..76439448 100644 --- a/src/BE/Services/Conversations/Dtos/ChatSegment.cs +++ b/src/BE/Services/ChatServices/Dtos/ChatSegment.cs @@ -1,6 +1,6 @@ using OpenAI.Chat; -namespace Chats.BE.Services.Conversations.Dtos; +namespace Chats.BE.Services.ChatServices.Dtos; public record ChatSegment { diff --git a/src/BE/Services/Conversations/Dtos/ChatTokenUsage.cs b/src/BE/Services/ChatServices/Dtos/ChatTokenUsage.cs similarity index 87% rename from src/BE/Services/Conversations/Dtos/ChatTokenUsage.cs rename to src/BE/Services/ChatServices/Dtos/ChatTokenUsage.cs index f7e85265..2bbdec75 100644 --- a/src/BE/Services/Conversations/Dtos/ChatTokenUsage.cs +++ b/src/BE/Services/ChatServices/Dtos/ChatTokenUsage.cs @@ -1,4 +1,4 @@ -namespace Chats.BE.Services.Conversations.Dtos; +namespace Chats.BE.Services.ChatServices.Dtos; public record ChatTokenUsage { diff --git a/src/BE/Services/Conversations/Dtos/InternalChatSegment.cs b/src/BE/Services/ChatServices/Dtos/InternalChatSegment.cs similarity index 98% rename from src/BE/Services/Conversations/Dtos/InternalChatSegment.cs rename to src/BE/Services/ChatServices/Dtos/InternalChatSegment.cs index 35aa758e..63d89e17 100644 --- a/src/BE/Services/Conversations/Dtos/InternalChatSegment.cs +++ b/src/BE/Services/ChatServices/Dtos/InternalChatSegment.cs @@ -1,7 +1,7 @@ using Chats.BE.Controllers.OpenAICompatible.Dtos; using OpenAI.Chat; -namespace Chats.BE.Services.Conversations.Dtos; +namespace Chats.BE.Services.ChatServices.Dtos; public record InternalChatSegment { diff --git a/src/BE/Services/Conversations/Extensions/ChatCompletionOptionsExtensions.cs b/src/BE/Services/ChatServices/Extensions/ChatCompletionOptionsExtensions.cs similarity index 97% rename from src/BE/Services/Conversations/Extensions/ChatCompletionOptionsExtensions.cs rename to src/BE/Services/ChatServices/Extensions/ChatCompletionOptionsExtensions.cs index 580d69bb..56eaef85 100644 --- a/src/BE/Services/Conversations/Extensions/ChatCompletionOptionsExtensions.cs +++ b/src/BE/Services/ChatServices/Extensions/ChatCompletionOptionsExtensions.cs @@ -2,7 +2,7 @@ using System.Reflection; using System.Runtime.CompilerServices; -namespace Chats.BE.Services.Conversations.Extensions; +namespace Chats.BE.Services.ChatServices.Extensions; public static class ChatCompletionOptionsExtensions { diff --git a/src/BE/Services/Conversations/Extensions/ChatMessageContentPartExtensions.cs b/src/BE/Services/ChatServices/Extensions/ChatMessageContentPartExtensions.cs similarity index 92% rename from src/BE/Services/Conversations/Extensions/ChatMessageContentPartExtensions.cs rename to src/BE/Services/ChatServices/Extensions/ChatMessageContentPartExtensions.cs index b3b5ef0d..07b35aef 100644 --- a/src/BE/Services/Conversations/Extensions/ChatMessageContentPartExtensions.cs +++ b/src/BE/Services/ChatServices/Extensions/ChatMessageContentPartExtensions.cs @@ -1,7 +1,7 @@ using Microsoft.ML.Tokenizers; using OpenAI.Chat; -namespace Chats.BE.Services.Conversations.Extensions; +namespace Chats.BE.Services.ChatServices.Extensions; public static class 
ChatMessageContentPartExtensions { diff --git a/src/BE/Services/Conversations/Extensions/ChatMessageExtensions.cs b/src/BE/Services/ChatServices/Extensions/ChatMessageExtensions.cs similarity index 85% rename from src/BE/Services/Conversations/Extensions/ChatMessageExtensions.cs rename to src/BE/Services/ChatServices/Extensions/ChatMessageExtensions.cs index e10da32a..c5a1f845 100644 --- a/src/BE/Services/Conversations/Extensions/ChatMessageExtensions.cs +++ b/src/BE/Services/ChatServices/Extensions/ChatMessageExtensions.cs @@ -1,7 +1,7 @@ using Microsoft.ML.Tokenizers; using OpenAI.Chat; -namespace Chats.BE.Services.Conversations.Extensions; +namespace Chats.BE.Services.ChatServices.Extensions; public static class ChatMessageExtensions { diff --git a/src/BE/Services/Conversations/Implementations/Azure/AzureConversationService.cs b/src/BE/Services/ChatServices/Implementations/Azure/AzureChatService.cs similarity index 70% rename from src/BE/Services/Conversations/Implementations/Azure/AzureConversationService.cs rename to src/BE/Services/ChatServices/Implementations/Azure/AzureChatService.cs index 1ef7efb6..027532f6 100644 --- a/src/BE/Services/Conversations/Implementations/Azure/AzureConversationService.cs +++ b/src/BE/Services/ChatServices/Implementations/Azure/AzureChatService.cs @@ -1,13 +1,13 @@ using Azure.AI.OpenAI; using Chats.BE.DB; -using Chats.BE.Services.Conversations.Implementations.OpenAI; +using Chats.BE.Services.ChatServices.Implementations.OpenAI; using OpenAI; using OpenAI.Chat; using System.ClientModel; -namespace Chats.BE.Services.Conversations.Implementations.Azure; +namespace Chats.BE.Services.ChatServices.Implementations.Azure; -public class AzureConversationService(Model model) : OpenAIConversationService(model, CreateChatClient(model)) +public class AzureChatService(Model model) : OpenAIChatService(model, CreateChatClient(model)) { static ChatClient CreateChatClient(Model model) { diff --git a/src/BE/Services/Conversations/Implementations/Azure/JsonAzureConfig.cs b/src/BE/Services/ChatServices/Implementations/Azure/JsonAzureConfig.cs similarity index 86% rename from src/BE/Services/Conversations/Implementations/Azure/JsonAzureConfig.cs rename to src/BE/Services/ChatServices/Implementations/Azure/JsonAzureConfig.cs index 00791f99..f3d80fb5 100644 --- a/src/BE/Services/Conversations/Implementations/Azure/JsonAzureConfig.cs +++ b/src/BE/Services/ChatServices/Implementations/Azure/JsonAzureConfig.cs @@ -1,7 +1,7 @@ using System.Text.Json; using System.Text.Json.Serialization; -namespace Chats.BE.Services.Conversations.Implementations.Azure; +namespace Chats.BE.Services.ChatServices.Implementations.Azure; public record JsonAzureApiConfig { diff --git a/src/BE/Services/Conversations/Implementations/Azure/JsonAzureModelConfig.cs b/src/BE/Services/ChatServices/Implementations/Azure/JsonAzureModelConfig.cs similarity index 86% rename from src/BE/Services/Conversations/Implementations/Azure/JsonAzureModelConfig.cs rename to src/BE/Services/ChatServices/Implementations/Azure/JsonAzureModelConfig.cs index eefb6aad..15f77097 100644 --- a/src/BE/Services/Conversations/Implementations/Azure/JsonAzureModelConfig.cs +++ b/src/BE/Services/ChatServices/Implementations/Azure/JsonAzureModelConfig.cs @@ -1,6 +1,6 @@ using System.Text.Json.Serialization; -namespace Chats.BE.Services.Conversations.Implementations.Azure; +namespace Chats.BE.Services.ChatServices.Implementations.Azure; public record JsonAzureModelConfig { diff --git 
a/src/BE/Services/Conversations/Implementations/DashScope/DashScopeConversationService.cs b/src/BE/Services/ChatServices/Implementations/DashScope/DashScopeChatService.cs similarity index 94% rename from src/BE/Services/Conversations/Implementations/DashScope/DashScopeConversationService.cs rename to src/BE/Services/ChatServices/Implementations/DashScope/DashScopeChatService.cs index 2b431f0d..9b425946 100644 --- a/src/BE/Services/Conversations/Implementations/DashScope/DashScopeConversationService.cs +++ b/src/BE/Services/ChatServices/Implementations/DashScope/DashScopeChatService.cs @@ -1,4 +1,4 @@ -using Chats.BE.Services.Conversations.Dtos; +using Chats.BE.Services.ChatServices.Dtos; using Sdcb.DashScope; using Sdcb.DashScope.TextGeneration; using OpenAIChatMessage = OpenAI.Chat.ChatMessage; @@ -9,18 +9,18 @@ using System.Runtime.CompilerServices; using OpenAI.Chat; using Chats.BE.DB; -using Chats.BE.Services.Conversations.Extensions; +using Chats.BE.Services.ChatServices.Extensions; using ChatTokenUsage = Sdcb.DashScope.TextGeneration.ChatTokenUsage; using ChatMessage = Sdcb.DashScope.TextGeneration.ChatMessage; -namespace Chats.BE.Services.Conversations.Implementations.DashScope; +namespace Chats.BE.Services.ChatServices.Implementations.DashScope; -public class DashScopeConversationService : ConversationService +public class DashScopeChatService : ChatService { private DashScopeClient Client { get; } private TextGenerationClient ChatClient { get; } - public DashScopeConversationService(Model model) : base(model) + public DashScopeChatService(Model model) : base(model) { ArgumentException.ThrowIfNullOrWhiteSpace(model.ModelKey.Secret, nameof(model.ModelKey.Secret)); diff --git a/src/BE/Services/Conversations/Implementations/DashScope/JsonDashScopeConfig.cs b/src/BE/Services/ChatServices/Implementations/DashScope/JsonDashScopeConfig.cs similarity index 77% rename from src/BE/Services/Conversations/Implementations/DashScope/JsonDashScopeConfig.cs rename to src/BE/Services/ChatServices/Implementations/DashScope/JsonDashScopeConfig.cs index 77de7d5e..945184c1 100644 --- a/src/BE/Services/Conversations/Implementations/DashScope/JsonDashScopeConfig.cs +++ b/src/BE/Services/ChatServices/Implementations/DashScope/JsonDashScopeConfig.cs @@ -1,6 +1,6 @@ using System.Text.Json.Serialization; -namespace Chats.BE.Services.Conversations.Implementations.DashScope; +namespace Chats.BE.Services.ChatServices.Implementations.DashScope; public class JsonDashScopeConfig { diff --git a/src/BE/Services/Conversations/Implementations/DashScope/JsonDashScopeModelConfig.cs b/src/BE/Services/ChatServices/Implementations/DashScope/JsonDashScopeModelConfig.cs similarity index 86% rename from src/BE/Services/Conversations/Implementations/DashScope/JsonDashScopeModelConfig.cs rename to src/BE/Services/ChatServices/Implementations/DashScope/JsonDashScopeModelConfig.cs index ed46eae2..b52ee969 100644 --- a/src/BE/Services/Conversations/Implementations/DashScope/JsonDashScopeModelConfig.cs +++ b/src/BE/Services/ChatServices/Implementations/DashScope/JsonDashScopeModelConfig.cs @@ -1,6 +1,6 @@ using System.Text.Json.Serialization; -namespace Chats.BE.Services.Conversations.Implementations.DashScope; +namespace Chats.BE.Services.ChatServices.Implementations.DashScope; public record JsonDashScopeModelConfig { diff --git a/src/BE/Services/ChatServices/Implementations/GLM/GLMChatService.cs b/src/BE/Services/ChatServices/Implementations/GLM/GLMChatService.cs new file mode 100644 index 00000000..088451e0 --- /dev/null +++ 
b/src/BE/Services/ChatServices/Implementations/GLM/GLMChatService.cs @@ -0,0 +1,8 @@ +using Chats.BE.DB; +using Chats.BE.Services.ChatServices.Implementations.OpenAI; + +namespace Chats.BE.Services.ChatServices.Implementations.GLM; + +public class GLMChatService(Model model) : OpenAIChatService(model, new Uri("https://open.bigmodel.cn/api/paas/v4/")) +{ +} diff --git a/src/BE/Services/Conversations/Implementations/GLM/JsonGLMModelConfig.cs b/src/BE/Services/ChatServices/Implementations/GLM/JsonGLMModelConfig.cs similarity index 84% rename from src/BE/Services/Conversations/Implementations/GLM/JsonGLMModelConfig.cs rename to src/BE/Services/ChatServices/Implementations/GLM/JsonGLMModelConfig.cs index dea55803..818a9498 100644 --- a/src/BE/Services/Conversations/Implementations/GLM/JsonGLMModelConfig.cs +++ b/src/BE/Services/ChatServices/Implementations/GLM/JsonGLMModelConfig.cs @@ -1,6 +1,6 @@ using System.Text.Json.Serialization; -namespace Chats.BE.Services.Conversations.Implementations.GLM; +namespace Chats.BE.Services.ChatServices.Implementations.GLM; public record JsonGLMModelConfig { diff --git a/src/BE/Services/Conversations/Implementations/Hunyuan/HunyuanConversationService.cs b/src/BE/Services/ChatServices/Implementations/Hunyuan/HunyuanChatService.cs similarity index 95% rename from src/BE/Services/Conversations/Implementations/Hunyuan/HunyuanConversationService.cs rename to src/BE/Services/ChatServices/Implementations/Hunyuan/HunyuanChatService.cs index e6ac6440..fcda09f0 100644 --- a/src/BE/Services/Conversations/Implementations/Hunyuan/HunyuanConversationService.cs +++ b/src/BE/Services/ChatServices/Implementations/Hunyuan/HunyuanChatService.cs @@ -1,4 +1,4 @@ -using Chats.BE.Services.Conversations.Dtos; +using Chats.BE.Services.ChatServices.Dtos; using OpenAI.Chat; using System.Text.Json; using TencentCloud.Common; @@ -14,13 +14,13 @@ using Chats.BE.DB; using Message = TencentCloud.Hunyuan.V20230901.Models.Message; -namespace Chats.BE.Services.Conversations.Implementations.Hunyuan; +namespace Chats.BE.Services.ChatServices.Implementations.Hunyuan; -public class HunyuanConversationService : ConversationService +public class HunyuanChatService : ChatService { private HunyuanClient ChatClient { get; } - public HunyuanConversationService(Model model) : base(model) + public HunyuanChatService(Model model) : base(model) { ArgumentException.ThrowIfNullOrWhiteSpace(model.ModelKey.Host, nameof(model.ModelKey.Host)); ArgumentException.ThrowIfNullOrWhiteSpace(model.ModelKey.Secret, nameof(model.ModelKey.Secret)); diff --git a/src/BE/Services/Conversations/Implementations/Hunyuan/HuyuanChatSegment.cs b/src/BE/Services/ChatServices/Implementations/Hunyuan/HuyuanChatSegment.cs similarity index 94% rename from src/BE/Services/Conversations/Implementations/Hunyuan/HuyuanChatSegment.cs rename to src/BE/Services/ChatServices/Implementations/Hunyuan/HuyuanChatSegment.cs index c82c6cfc..420ff1a4 100644 --- a/src/BE/Services/Conversations/Implementations/Hunyuan/HuyuanChatSegment.cs +++ b/src/BE/Services/ChatServices/Implementations/Hunyuan/HuyuanChatSegment.cs @@ -1,4 +1,4 @@ -namespace Chats.BE.Services.Conversations.Implementations.Hunyuan; +namespace Chats.BE.Services.ChatServices.Implementations.Hunyuan; using System.Text.Json.Serialization; diff --git a/src/BE/Services/Conversations/Implementations/Hunyuan/JsonHunyuanKeyConfig.cs b/src/BE/Services/ChatServices/Implementations/Hunyuan/JsonHunyuanKeyConfig.cs similarity index 79% rename from 
src/BE/Services/Conversations/Implementations/Hunyuan/JsonHunyuanKeyConfig.cs rename to src/BE/Services/ChatServices/Implementations/Hunyuan/JsonHunyuanKeyConfig.cs index b493d7e4..b681046c 100644 --- a/src/BE/Services/Conversations/Implementations/Hunyuan/JsonHunyuanKeyConfig.cs +++ b/src/BE/Services/ChatServices/Implementations/Hunyuan/JsonHunyuanKeyConfig.cs @@ -1,6 +1,6 @@ using System.Text.Json.Serialization; -namespace Chats.BE.Services.Conversations.Implementations.Hunyuan; +namespace Chats.BE.Services.ChatServices.Implementations.Hunyuan; public record JsonHunyuanKeyConfig { diff --git a/src/BE/Services/Conversations/Implementations/Hunyuan/JsonHunyuanModelConfig.cs b/src/BE/Services/ChatServices/Implementations/Hunyuan/JsonHunyuanModelConfig.cs similarity index 83% rename from src/BE/Services/Conversations/Implementations/Hunyuan/JsonHunyuanModelConfig.cs rename to src/BE/Services/ChatServices/Implementations/Hunyuan/JsonHunyuanModelConfig.cs index 79e268ab..bb9853e5 100644 --- a/src/BE/Services/Conversations/Implementations/Hunyuan/JsonHunyuanModelConfig.cs +++ b/src/BE/Services/ChatServices/Implementations/Hunyuan/JsonHunyuanModelConfig.cs @@ -1,6 +1,6 @@ using System.Text.Json.Serialization; -namespace Chats.BE.Services.Conversations.Implementations.Hunyuan; +namespace Chats.BE.Services.ChatServices.Implementations.Hunyuan; public record JsonHunyuanModelConfig { diff --git a/src/BE/Services/Conversations/Implementations/Hunyuan/UnixDateTimeOffsetConverter.cs b/src/BE/Services/ChatServices/Implementations/Hunyuan/UnixDateTimeOffsetConverter.cs similarity index 91% rename from src/BE/Services/Conversations/Implementations/Hunyuan/UnixDateTimeOffsetConverter.cs rename to src/BE/Services/ChatServices/Implementations/Hunyuan/UnixDateTimeOffsetConverter.cs index 1f689d27..a968e35c 100644 --- a/src/BE/Services/Conversations/Implementations/Hunyuan/UnixDateTimeOffsetConverter.cs +++ b/src/BE/Services/ChatServices/Implementations/Hunyuan/UnixDateTimeOffsetConverter.cs @@ -1,7 +1,7 @@ using System.Text.Json; using System.Text.Json.Serialization; -namespace Chats.BE.Services.Conversations.Implementations.Hunyuan; +namespace Chats.BE.Services.ChatServices.Implementations.Hunyuan; internal class UnixDateTimeOffsetConverter : JsonConverter { diff --git a/src/BE/Services/Conversations/Implementations/Kimi/JsonKimiModelConfig.cs b/src/BE/Services/ChatServices/Implementations/Kimi/JsonKimiModelConfig.cs similarity index 79% rename from src/BE/Services/Conversations/Implementations/Kimi/JsonKimiModelConfig.cs rename to src/BE/Services/ChatServices/Implementations/Kimi/JsonKimiModelConfig.cs index bbf0266c..9e3e13df 100644 --- a/src/BE/Services/Conversations/Implementations/Kimi/JsonKimiModelConfig.cs +++ b/src/BE/Services/ChatServices/Implementations/Kimi/JsonKimiModelConfig.cs @@ -1,6 +1,6 @@ using System.Text.Json.Serialization; -namespace Chats.BE.Services.Conversations.Implementations.Kimi; +namespace Chats.BE.Services.ChatServices.Implementations.Kimi; public record JsonKimiModelConfig { diff --git a/src/BE/Services/ChatServices/Implementations/Kimi/KimiChatService.cs b/src/BE/Services/ChatServices/Implementations/Kimi/KimiChatService.cs new file mode 100644 index 00000000..c0a6c907 --- /dev/null +++ b/src/BE/Services/ChatServices/Implementations/Kimi/KimiChatService.cs @@ -0,0 +1,6 @@ +using Chats.BE.DB; +using Chats.BE.Services.ChatServices.Implementations.OpenAI; + +namespace Chats.BE.Services.ChatServices.Implementations.Kimi; + +public class KimiChatService(Model model) : 
OpenAIChatService(model, new Uri("https://api.moonshot.cn/v1")); \ No newline at end of file diff --git a/src/BE/Services/Conversations/Implementations/OpenAI/JsonOpenAIModelConfig.cs b/src/BE/Services/ChatServices/Implementations/OpenAI/JsonOpenAIModelConfig.cs similarity index 83% rename from src/BE/Services/Conversations/Implementations/OpenAI/JsonOpenAIModelConfig.cs rename to src/BE/Services/ChatServices/Implementations/OpenAI/JsonOpenAIModelConfig.cs index 5dd1e203..44e4d4a1 100644 --- a/src/BE/Services/Conversations/Implementations/OpenAI/JsonOpenAIModelConfig.cs +++ b/src/BE/Services/ChatServices/Implementations/OpenAI/JsonOpenAIModelConfig.cs @@ -1,6 +1,6 @@ using System.Text.Json.Serialization; -namespace Chats.BE.Services.Conversations.Implementations.OpenAI; +namespace Chats.BE.Services.ChatServices.Implementations.OpenAI; public record JsonOpenAIModelConfig { diff --git a/src/BE/Services/Conversations/Implementations/OpenAI/OpenAIConversationService.cs b/src/BE/Services/ChatServices/Implementations/OpenAI/OpenAIChatService.cs similarity index 86% rename from src/BE/Services/Conversations/Implementations/OpenAI/OpenAIConversationService.cs rename to src/BE/Services/ChatServices/Implementations/OpenAI/OpenAIChatService.cs index 87a101ea..be7ec7ff 100644 --- a/src/BE/Services/Conversations/Implementations/OpenAI/OpenAIConversationService.cs +++ b/src/BE/Services/ChatServices/Implementations/OpenAI/OpenAIChatService.cs @@ -1,17 +1,17 @@ -using Chats.BE.Services.Conversations.Dtos; +using Chats.BE.Services.ChatServices.Dtos; using OpenAI.Chat; using OpenAI; using System.Runtime.CompilerServices; using System.ClientModel; using Chats.BE.DB; -namespace Chats.BE.Services.Conversations.Implementations.OpenAI; +namespace Chats.BE.Services.ChatServices.Implementations.OpenAI; -public partial class OpenAIConversationService : ConversationService +public partial class OpenAIChatService : ChatService { private readonly ChatClient _chatClient; - public OpenAIConversationService(Model model, Uri? enforcedApiHost = null) : base(model) + public OpenAIChatService(Model model, Uri? enforcedApiHost = null) : base(model) { ArgumentException.ThrowIfNullOrWhiteSpace(model.ModelKey.Secret, nameof(model.ModelKey.Secret)); @@ -22,7 +22,7 @@ public OpenAIConversationService(Model model, Uri? 
enforcedApiHost = null) : bas _chatClient = api.GetChatClient(model.ApiModelId); } - public OpenAIConversationService(Model model, ChatClient chatClient) : base(model) + public OpenAIChatService(Model model, ChatClient chatClient) : base(model) { _chatClient = chatClient; } diff --git a/src/BE/Services/Conversations/Implementations/QianFan/JsonQianFanApiConfig.cs b/src/BE/Services/ChatServices/Implementations/QianFan/JsonQianFanApiConfig.cs similarity index 78% rename from src/BE/Services/Conversations/Implementations/QianFan/JsonQianFanApiConfig.cs rename to src/BE/Services/ChatServices/Implementations/QianFan/JsonQianFanApiConfig.cs index d34e7284..36fdb87d 100644 --- a/src/BE/Services/Conversations/Implementations/QianFan/JsonQianFanApiConfig.cs +++ b/src/BE/Services/ChatServices/Implementations/QianFan/JsonQianFanApiConfig.cs @@ -1,6 +1,6 @@ using System.Text.Json.Serialization; -namespace Chats.BE.Services.Conversations.Implementations.QianFan; +namespace Chats.BE.Services.ChatServices.Implementations.QianFan; public record JsonQianFanApiConfig { diff --git a/src/BE/Services/Conversations/Implementations/QianFan/JsonQianFanModelConfig.cs b/src/BE/Services/ChatServices/Implementations/QianFan/JsonQianFanModelConfig.cs similarity index 86% rename from src/BE/Services/Conversations/Implementations/QianFan/JsonQianFanModelConfig.cs rename to src/BE/Services/ChatServices/Implementations/QianFan/JsonQianFanModelConfig.cs index 8208f20b..851a1596 100644 --- a/src/BE/Services/Conversations/Implementations/QianFan/JsonQianFanModelConfig.cs +++ b/src/BE/Services/ChatServices/Implementations/QianFan/JsonQianFanModelConfig.cs @@ -1,6 +1,6 @@ using System.Text.Json.Serialization; -namespace Chats.BE.Services.Conversations.Implementations.QianFan; +namespace Chats.BE.Services.ChatServices.Implementations.QianFan; public record JsonQianFanModelConfig { diff --git a/src/BE/Services/Conversations/Implementations/QianFan/QianFanConversationService.cs b/src/BE/Services/ChatServices/Implementations/QianFan/QianFanChatService.cs similarity index 92% rename from src/BE/Services/Conversations/Implementations/QianFan/QianFanConversationService.cs rename to src/BE/Services/ChatServices/Implementations/QianFan/QianFanChatService.cs index e65bdc02..c3b3356c 100644 --- a/src/BE/Services/Conversations/Implementations/QianFan/QianFanConversationService.cs +++ b/src/BE/Services/ChatServices/Implementations/QianFan/QianFanChatService.cs @@ -1,4 +1,4 @@ -using Chats.BE.Services.Conversations.Dtos; +using Chats.BE.Services.ChatServices.Dtos; using Sdcb.WenXinQianFan; using System.Runtime.CompilerServices; using System.Text.Json; @@ -10,15 +10,15 @@ using ChatMessage = Sdcb.WenXinQianFan.ChatMessage; using Chats.BE.DB; using OpenAI.Chat; -using Chats.BE.Services.Conversations.Extensions; +using Chats.BE.Services.ChatServices.Extensions; -namespace Chats.BE.Services.Conversations.Implementations.QianFan; +namespace Chats.BE.Services.ChatServices.Implementations.QianFan; -public class QianFanConversationService : ConversationService +public class QianFanChatService : ChatService { private QianFanClient ChatClient { get; } - public QianFanConversationService(Model model) : base(model) + public QianFanChatService(Model model) : base(model) { ArgumentException.ThrowIfNullOrWhiteSpace(model.ModelKey.Secret, nameof(model.ModelKey.Secret)); JsonQianFanApiConfig apiConfig = JsonSerializer.Deserialize(model.ModelKey.Secret)!; diff --git a/src/BE/Services/Conversations/Implementations/Test/TestConversationService.cs 
b/src/BE/Services/ChatServices/Implementations/Test/TestChatService.cs similarity index 95% rename from src/BE/Services/Conversations/Implementations/Test/TestConversationService.cs rename to src/BE/Services/ChatServices/Implementations/Test/TestChatService.cs index e3eed5f6..a04d783c 100644 --- a/src/BE/Services/Conversations/Implementations/Test/TestConversationService.cs +++ b/src/BE/Services/ChatServices/Implementations/Test/TestChatService.cs @@ -1,12 +1,12 @@ using Chats.BE.DB; -using Chats.BE.Services.Conversations.Dtos; +using Chats.BE.Services.ChatServices.Dtos; using OpenAI.Chat; using System.Runtime.CompilerServices; using System.Text; -namespace Chats.BE.Services.Conversations.Implementations.Test; +namespace Chats.BE.Services.ChatServices.Implementations.Test; -public class TestConversationService(Model model) : ConversationService(model) +public class TestChatService(Model model) : ChatService(model) { public override async IAsyncEnumerable ChatStreamed( IReadOnlyList messages, diff --git a/src/BE/Services/Conversations/InChatContext.cs b/src/BE/Services/ChatServices/InChatContext.cs similarity index 97% rename from src/BE/Services/Conversations/InChatContext.cs rename to src/BE/Services/ChatServices/InChatContext.cs index f3d3ff77..b211e911 100644 --- a/src/BE/Services/Conversations/InChatContext.cs +++ b/src/BE/Services/ChatServices/InChatContext.cs @@ -1,13 +1,13 @@ -using Chats.BE.Controllers.Chats.Conversations; +using Chats.BE.Controllers.Chats.Chats; using Chats.BE.DB; using Chats.BE.DB.Enums; using Chats.BE.DB.Jsons; using Chats.BE.Services.Common; -using Chats.BE.Services.Conversations.Dtos; +using Chats.BE.Services.ChatServices.Dtos; using System.Diagnostics; using System.Text; -namespace Chats.BE.Services.Conversations; +namespace Chats.BE.Services.ChatServices; public class InChatContext() { diff --git a/src/BE/Services/Conversations/ModelValidateResult.cs b/src/BE/Services/ChatServices/ModelValidateResult.cs similarity index 93% rename from src/BE/Services/Conversations/ModelValidateResult.cs rename to src/BE/Services/ChatServices/ModelValidateResult.cs index 83004b92..78ba76f1 100644 --- a/src/BE/Services/Conversations/ModelValidateResult.cs +++ b/src/BE/Services/ChatServices/ModelValidateResult.cs @@ -1,7 +1,7 @@ using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; -namespace Chats.BE.Services.Conversations; +namespace Chats.BE.Services.ChatServices; public record ModelValidateResult { diff --git a/src/BE/Services/Conversations/Implementations/GLM/GLMConversationService.cs b/src/BE/Services/Conversations/Implementations/GLM/GLMConversationService.cs deleted file mode 100644 index 694b2996..00000000 --- a/src/BE/Services/Conversations/Implementations/GLM/GLMConversationService.cs +++ /dev/null @@ -1,8 +0,0 @@ -using Chats.BE.DB; -using Chats.BE.Services.Conversations.Implementations.OpenAI; - -namespace Chats.BE.Services.Conversations.Implementations.GLM; - -public class GLMConversationService(Model model) : OpenAIConversationService(model, new Uri("https://open.bigmodel.cn/api/paas/v4/")) -{ -} diff --git a/src/BE/Services/Conversations/Implementations/Kimi/KimiConversationService.cs b/src/BE/Services/Conversations/Implementations/Kimi/KimiConversationService.cs deleted file mode 100644 index 65164c9c..00000000 --- a/src/BE/Services/Conversations/Implementations/Kimi/KimiConversationService.cs +++ /dev/null @@ -1,6 +0,0 @@ -using Chats.BE.DB; -using Chats.BE.Services.Conversations.Implementations.OpenAI; - -namespace 
Chats.BE.Services.Conversations.Implementations.Kimi; - -public class KimiConversationService(Model model) : OpenAIConversationService(model, new Uri("https://api.moonshot.cn/v1")); \ No newline at end of file From 3b581d9cb8ad7eaa98abf2c4c10142e67f9f5115 Mon Sep 17 00:00:00 2001 From: sdcb Date: Fri, 6 Dec 2024 23:07:10 +0800 Subject: [PATCH 3/5] add support for 01.ai --- src/BE/Services/ChatServices/ChatFactory.cs | 3 ++- .../Implementations/LingYi/LingYiChatService.cs | 6 ++++++ src/FE/components/Chat/Chat.tsx | 4 +++- src/FE/hooks/useFetch.ts | 2 +- 4 files changed, 12 insertions(+), 3 deletions(-) create mode 100644 src/BE/Services/ChatServices/Implementations/LingYi/LingYiChatService.cs diff --git a/src/BE/Services/ChatServices/ChatFactory.cs b/src/BE/Services/ChatServices/ChatFactory.cs index 0befd67a..40701b16 100644 --- a/src/BE/Services/ChatServices/ChatFactory.cs +++ b/src/BE/Services/ChatServices/ChatFactory.cs @@ -5,6 +5,7 @@ using Chats.BE.Services.ChatServices.Implementations.GLM; using Chats.BE.Services.ChatServices.Implementations.Hunyuan; using Chats.BE.Services.ChatServices.Implementations.Kimi; +using Chats.BE.Services.ChatServices.Implementations.LingYi; using Chats.BE.Services.ChatServices.Implementations.OpenAI; using Chats.BE.Services.ChatServices.Implementations.QianFan; using Chats.BE.Services.ChatServices.Implementations.Test; @@ -28,7 +29,7 @@ public ChatService CreateConversationService(Model model) DBModelProvider.Moonshot => new KimiChatService(model), DBModelProvider.HunYuan => new HunyuanChatService(model), DBModelProvider.Sparkdesk => throw new NotImplementedException("Spark model is not implemented"), - DBModelProvider.LingYi => throw new NotImplementedException("LingYi model is not implemented"), + DBModelProvider.LingYi => new LingYiChatService(model), _ => throw new NotSupportedException($"Unknown model provider: {modelProvider}") }; return cs; diff --git a/src/BE/Services/ChatServices/Implementations/LingYi/LingYiChatService.cs b/src/BE/Services/ChatServices/Implementations/LingYi/LingYiChatService.cs new file mode 100644 index 00000000..d777fd27 --- /dev/null +++ b/src/BE/Services/ChatServices/Implementations/LingYi/LingYiChatService.cs @@ -0,0 +1,6 @@ +using Chats.BE.DB; +using Chats.BE.Services.ChatServices.Implementations.OpenAI; + +namespace Chats.BE.Services.ChatServices.Implementations.LingYi; + +public class LingYiChatService(Model model) : OpenAIChatService(model, new Uri("https://api.lingyiwanwu.com/v1")); diff --git a/src/FE/components/Chat/Chat.tsx b/src/FE/components/Chat/Chat.tsx index 238dbb71..841fb68b 100644 --- a/src/FE/components/Chat/Chat.tsx +++ b/src/FE/components/Chat/Chat.tsx @@ -392,7 +392,9 @@ export const Chat = memo(() => { ...userModelConfig, enableSearch: model.allowSearch, }); - putUserChatModel(selectChat.id, model.modelId); + if (selectChat.id) { + putUserChatModel(selectChat.id, model.modelId); + } }} /> )} diff --git a/src/FE/hooks/useFetch.ts b/src/FE/hooks/useFetch.ts index 61da2f39..3a3d4046 100644 --- a/src/FE/hooks/useFetch.ts +++ b/src/FE/hooks/useFetch.ts @@ -102,7 +102,7 @@ export const useFetch = () => { }) .then(async (response) => { if (!response.ok) { - await handleErrorResponse(response); + throw response; } const result = readResponse(response); From 30db00798bb8b8a0b03fb91400b1fe03f67b26c7 Mon Sep 17 00:00:00 2001 From: sdcb Date: Fri, 6 Dec 2024 23:32:02 +0800 Subject: [PATCH 4/5] add support for deepseek --- src/BE/DB/Enums/DBModelProvider.cs | 3 ++- src/BE/Services/ChatServices/ChatFactory.cs | 2 ++ 
.../Implementations/DeepSeek/DeepSeekChatService.cs | 6 ++++++ src/FE/components/Chat/SystemPrompt.tsx | 11 +++++------ src/FE/components/HomeContent/HomeContent.tsx | 2 +- src/FE/locales/zh-CN.json | 1 + src/FE/public/logos/deepseek.svg | 3 +++ src/FE/types/model.ts | 2 ++ src/FE/utils/common.ts | 1 - 9 files changed, 22 insertions(+), 9 deletions(-) create mode 100644 src/BE/Services/ChatServices/Implementations/DeepSeek/DeepSeekChatService.cs create mode 100644 src/FE/public/logos/deepseek.svg diff --git a/src/BE/DB/Enums/DBModelProvider.cs b/src/BE/DB/Enums/DBModelProvider.cs index 39df10ef..c5f47d14 100644 --- a/src/BE/DB/Enums/DBModelProvider.cs +++ b/src/BE/DB/Enums/DBModelProvider.cs @@ -11,5 +11,6 @@ public enum DBModelProvider WenXinQianFan = 6, AliyunDashscope = 7, Sparkdesk = 8, - ZhiPuAI = 9 + ZhiPuAI = 9, + DeepSeek = 10, } diff --git a/src/BE/Services/ChatServices/ChatFactory.cs b/src/BE/Services/ChatServices/ChatFactory.cs index 40701b16..611642f1 100644 --- a/src/BE/Services/ChatServices/ChatFactory.cs +++ b/src/BE/Services/ChatServices/ChatFactory.cs @@ -2,6 +2,7 @@ using Chats.BE.DB.Enums; using Chats.BE.Services.ChatServices.Implementations.Azure; using Chats.BE.Services.ChatServices.Implementations.DashScope; +using Chats.BE.Services.ChatServices.Implementations.DeepSeek; using Chats.BE.Services.ChatServices.Implementations.GLM; using Chats.BE.Services.ChatServices.Implementations.Hunyuan; using Chats.BE.Services.ChatServices.Implementations.Kimi; @@ -30,6 +31,7 @@ public ChatService CreateConversationService(Model model) DBModelProvider.HunYuan => new HunyuanChatService(model), DBModelProvider.Sparkdesk => throw new NotImplementedException("Spark model is not implemented"), DBModelProvider.LingYi => new LingYiChatService(model), + DBModelProvider.DeepSeek => new DeepSeekChatService(model), _ => throw new NotSupportedException($"Unknown model provider: {modelProvider}") }; return cs; diff --git a/src/BE/Services/ChatServices/Implementations/DeepSeek/DeepSeekChatService.cs b/src/BE/Services/ChatServices/Implementations/DeepSeek/DeepSeekChatService.cs new file mode 100644 index 00000000..e178ebda --- /dev/null +++ b/src/BE/Services/ChatServices/Implementations/DeepSeek/DeepSeekChatService.cs @@ -0,0 +1,6 @@ +using Chats.BE.DB; +using Chats.BE.Services.ChatServices.Implementations.OpenAI; + +namespace Chats.BE.Services.ChatServices.Implementations.DeepSeek; + +public class DeepSeekChatService(Model model) : OpenAIChatService(model, new Uri("https://api.deepseek.com/v1")); \ No newline at end of file diff --git a/src/FE/components/Chat/SystemPrompt.tsx b/src/FE/components/Chat/SystemPrompt.tsx index 8edf0004..478360ba 100644 --- a/src/FE/components/Chat/SystemPrompt.tsx +++ b/src/FE/components/Chat/SystemPrompt.tsx @@ -29,7 +29,7 @@ interface Props { export const SystemPrompt: FC = ({ currentPrompt, prompts, - model, + model, onChangePromptText, onChangePrompt, }) => { @@ -164,8 +164,8 @@ export const SystemPrompt: FC = ({ }, [value]); useEffect(() => { - setValue(currentPrompt); - }, [currentPrompt]); + setValue(formatPrompt(currentPrompt, { model })); + }, [currentPrompt, model]); useEffect(() => { const handleOutsideClick = (e: MouseEvent) => { @@ -196,11 +196,10 @@ export const SystemPrompt: FC = ({ resize: 'none', bottom: `${textareaRef?.current?.scrollHeight}px`, maxHeight: '300px', - overflow: `${ - textareaRef.current && textareaRef.current.scrollHeight > 400 + overflow: `${textareaRef.current && textareaRef.current.scrollHeight > 400 ? 
'auto' : 'hidden' - }`, + }`, }} placeholder={ t(`Enter a prompt or type "/" to select a prompt...`) || '' diff --git a/src/FE/components/HomeContent/HomeContent.tsx b/src/FE/components/HomeContent/HomeContent.tsx index eacdde3d..49b60c72 100644 --- a/src/FE/components/HomeContent/HomeContent.tsx +++ b/src/FE/components/HomeContent/HomeContent.tsx @@ -103,7 +103,7 @@ const HomeContent = () => { data.temperature ?? userModelConfig?.temperature ?? DEFAULT_TEMPERATURE, - prompt: formatPrompt(data.content, { model }), + prompt: data.content, }); }); }; diff --git a/src/FE/locales/zh-CN.json b/src/FE/locales/zh-CN.json index 8d5de6b7..590a1efd 100644 --- a/src/FE/locales/zh-CN.json +++ b/src/FE/locales/zh-CN.json @@ -325,6 +325,7 @@ "DashScope": "通义千问", "Xunfei SparkDesk": "讯飞星火", "Zhipu AI": "智谱AI", + "DeepSeek": "DeepSeek", "Invalid JSON config": "无效的JSON配置", "Invalid host": "无效的host", "Invalid secret": "无效的secret", diff --git a/src/FE/public/logos/deepseek.svg b/src/FE/public/logos/deepseek.svg new file mode 100644 index 00000000..24ab7be7 --- /dev/null +++ b/src/FE/public/logos/deepseek.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/src/FE/types/model.ts b/src/FE/types/model.ts index c3780f6b..6e5e51bf 100644 --- a/src/FE/types/model.ts +++ b/src/FE/types/model.ts @@ -15,6 +15,7 @@ export enum DBModelProvider { QianWen = 7, Spark = 8, ZhiPuAI = 9, + DeepSeek = 10, } export type FEModelProvider = { @@ -34,6 +35,7 @@ export const feModelProviders: FEModelProvider[] = [ { id: 7, name: 'DashScope', icon: '/logos/qianwen.svg' }, { id: 8, name: 'Xunfei SparkDesk', icon: '/logos/spark.svg' }, { id: 9, name: 'Zhipu AI', icon: '/logos/zhipuai.svg' }, + { id: 10, name: 'DeepSeek', icon: '/logos/deepseek.svg' }, ]; export interface ChatModelFileConfig { diff --git a/src/FE/utils/common.ts b/src/FE/utils/common.ts index 4005943d..dd4f34df 100644 --- a/src/FE/utils/common.ts +++ b/src/FE/utils/common.ts @@ -35,7 +35,6 @@ export const getApiUrl = () => : process.env.API_URL; export const getQueryId = (router: NextRouter): string => { - console.log(router.query); const { id } = router.query; if (id) { if (Array.isArray(id)) { From fae4590be44faa4ba2299f029fb5a2e32dcc8c74 Mon Sep 17 00:00:00 2001 From: sdcb Date: Fri, 6 Dec 2024 23:35:43 +0800 Subject: [PATCH 5/5] add initial data for deepseek --- src/BE/DB/Init/BasicData.cs | 248 ++++++++++++++++++------------------ 1 file changed, 125 insertions(+), 123 deletions(-) diff --git a/src/BE/DB/Init/BasicData.cs b/src/BE/DB/Init/BasicData.cs index 64a45a3a..7f9c0d59 100644 --- a/src/BE/DB/Init/BasicData.cs +++ b/src/BE/DB/Init/BasicData.cs @@ -107,138 +107,140 @@ private static void InsertTransactionTypes(ChatsDB db) private static void InsertModelReferences(ChatsDB db) { - // Generated from data, hash: 402f5ef77477ae6e08aa26ddfb7d4d900742206573e59d985caa51438291c7fd + // Generated from data, hash: c5fc80eafe0435e9a03ed9df3b46fe5755d391047b0f8460d17228b4a10fd886 db.ModelReferences.AddRange( [ - new(){ Id=0, ProviderId=0, Name="Test", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=2048, MaxResponseTokens=2048, TokenizerId=1, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, - new(){ Id=100, ProviderId=1, Name="gpt-35-turbo-0301", ShortName="gpt-35-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, 
AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=1.50000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, - new(){ Id=101, ProviderId=1, Name="gpt-35-turbo-16k-0613", ShortName="gpt-35-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=16385, MaxResponseTokens=16385, TokenizerId=1, InputTokenPrice1M=3.00000M, OutputTokenPrice1M=4.00000M, CurrencyCode="USD", }, - new(){ Id=102, ProviderId=1, Name="gpt-35-turbo-0613", ShortName="gpt-35-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=1.50000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, - new(){ Id=103, ProviderId=1, Name="gpt-35-turbo-1106", ShortName="gpt-35-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, - new(){ Id=104, ProviderId=1, Name="gpt-35-turbo-instruct", ShortName="gpt-35-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=1.50000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, - new(){ Id=105, ProviderId=1, Name="gpt-35-turbo-0125", ShortName="gpt-35-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=0.50000M, OutputTokenPrice1M=1.50000M, CurrencyCode="USD", }, - new(){ Id=106, ProviderId=1, Name="gpt-4-vision-preview", ShortName="gpt-4-vision", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="USD", }, - new(){ Id=107, ProviderId=1, Name="gpt-4-1106-preview", ShortName="gpt-4-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="USD", }, - new(){ Id=108, ProviderId=1, Name="gpt-4-0125-preview", ShortName="gpt-4-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="USD", }, - new(){ Id=109, ProviderId=1, Name="gpt-4-32k", ShortName="gpt-4-32k", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=32768, TokenizerId=1, InputTokenPrice1M=60.00000M, OutputTokenPrice1M=120.00000M, CurrencyCode="USD", }, - new(){ Id=110, ProviderId=1, Name="gpt-4", ShortName="gpt-4", IsLegacy=true, MinTemperature=0.00M, 
MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=8192, TokenizerId=1, InputTokenPrice1M=30.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="USD", }, - new(){ Id=111, ProviderId=1, Name="gpt-4-turbo-2024-04-09", ShortName="gpt-4-turbo", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="USD", }, - new(){ Id=112, ProviderId=1, Name="o1-preview-2024-09-12", ShortName="o1-preview", IsLegacy=false, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=128000, MaxResponseTokens=32768, TokenizerId=2, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="USD", }, - new(){ Id=113, ProviderId=1, Name="o1-mini-2024-09-12", ShortName="o1-mini", IsLegacy=false, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=128000, MaxResponseTokens=65536, TokenizerId=2, InputTokenPrice1M=3.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="USD", }, - new(){ Id=114, ProviderId=1, Name="gpt-4o-mini-2024-07-18", ShortName="gpt-4o", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=16384, TokenizerId=2, InputTokenPrice1M=0.15000M, OutputTokenPrice1M=0.60000M, CurrencyCode="USD", }, - new(){ Id=115, ProviderId=1, Name="gpt-4o-2024-05-13", ShortName="gpt-4o", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=2, InputTokenPrice1M=5.00000M, OutputTokenPrice1M=15.00000M, CurrencyCode="USD", }, - new(){ Id=116, ProviderId=1, Name="gpt-4o-2024-08-06", ShortName="gpt-4o", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=16384, TokenizerId=2, InputTokenPrice1M=2.50000M, OutputTokenPrice1M=10.00000M, CurrencyCode="USD", }, - new(){ Id=117, ProviderId=1, Name="gpt-4o-2024-11-20", ShortName="gpt-4o", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=16384, TokenizerId=2, InputTokenPrice1M=2.50000M, OutputTokenPrice1M=10.00000M, CurrencyCode="USD", }, - new(){ Id=200, ProviderId=2, Name="hunyuan-turbo", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=50.00000M, CurrencyCode="RMB", }, - new(){ Id=201, ProviderId=2, Name="hunyuan-pro", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=30.00000M, OutputTokenPrice1M=100.00000M, CurrencyCode="RMB", }, - new(){ Id=202, ProviderId=2, 
Name="hunyuan-standard-256K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=262144, MaxResponseTokens=6144, TokenizerId=null, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="RMB", }, - new(){ Id=203, ProviderId=2, Name="hunyuan-standard", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=4.50000M, OutputTokenPrice1M=5.00000M, CurrencyCode="RMB", }, - new(){ Id=204, ProviderId=2, Name="hunyuan-lite", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=262144, MaxResponseTokens=6144, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, - new(){ Id=205, ProviderId=2, Name="hunyuan-role", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=4.00000M, OutputTokenPrice1M=8.00000M, CurrencyCode="RMB", }, - new(){ Id=206, ProviderId=2, Name="hunyuan-functioncall ", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=4.00000M, OutputTokenPrice1M=8.00000M, CurrencyCode="RMB", }, - new(){ Id=207, ProviderId=2, Name="hunyuan-code", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=4.00000M, OutputTokenPrice1M=8.00000M, CurrencyCode="RMB", }, - new(){ Id=208, ProviderId=2, Name="hunyuan-vision", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=18.00000M, OutputTokenPrice1M=18.00000M, CurrencyCode="RMB", }, - new(){ Id=300, ProviderId=3, Name="yi-lightning", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=16384, MaxResponseTokens=16384, TokenizerId=null, InputTokenPrice1M=0.99000M, OutputTokenPrice1M=0.99000M, CurrencyCode="RMB", }, - new(){ Id=301, ProviderId=3, Name="yi-large", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=16384, TokenizerId=null, InputTokenPrice1M=20.00000M, OutputTokenPrice1M=20.00000M, CurrencyCode="RMB", }, - new(){ Id=302, ProviderId=3, Name="yi-medium", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=16384, MaxResponseTokens=16384, TokenizerId=null, InputTokenPrice1M=2.50000M, OutputTokenPrice1M=2.50000M, CurrencyCode="RMB", }, - new(){ Id=303, 
ProviderId=3, Name="yi-vision", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=16384, MaxResponseTokens=16384, TokenizerId=null, InputTokenPrice1M=6.00000M, OutputTokenPrice1M=6.00000M, CurrencyCode="RMB", }, - new(){ Id=304, ProviderId=3, Name="yi-medium-200k", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=204800, MaxResponseTokens=16384, TokenizerId=null, InputTokenPrice1M=12.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="RMB", }, - new(){ Id=305, ProviderId=3, Name="yi-spark", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=16384, MaxResponseTokens=16384, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=1.00000M, CurrencyCode="RMB", }, - new(){ Id=306, ProviderId=3, Name="yi-large-rag", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=16384, MaxResponseTokens=16384, TokenizerId=null, InputTokenPrice1M=25.00000M, OutputTokenPrice1M=25.00000M, CurrencyCode="RMB", }, - new(){ Id=307, ProviderId=3, Name="yi-large-fc", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=16384, TokenizerId=null, InputTokenPrice1M=20.00000M, OutputTokenPrice1M=20.00000M, CurrencyCode="RMB", }, - new(){ Id=308, ProviderId=3, Name="yi-large-turbo", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=16384, MaxResponseTokens=16384, TokenizerId=null, InputTokenPrice1M=12.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="RMB", }, - new(){ Id=400, ProviderId=4, Name="moonshot-v1-8k", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=12.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="RMB", }, - new(){ Id=401, ProviderId=4, Name="moonshot-v1-32k", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=32768, TokenizerId=null, InputTokenPrice1M=24.00000M, OutputTokenPrice1M=24.00000M, CurrencyCode="RMB", }, - new(){ Id=402, ProviderId=4, Name="moonshot-v1-128k", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=131072, TokenizerId=null, InputTokenPrice1M=60.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="RMB", }, - new(){ Id=500, ProviderId=5, Name="gpt-3.5-turbo-0301", ShortName="gpt-3.5-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=1.50000M, OutputTokenPrice1M=2.00000M, 
CurrencyCode="USD", }, - new(){ Id=501, ProviderId=5, Name="gpt-3.5-turbo-16k-0613", ShortName="gpt-3.5-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=16385, MaxResponseTokens=16385, TokenizerId=1, InputTokenPrice1M=3.00000M, OutputTokenPrice1M=4.00000M, CurrencyCode="USD", }, - new(){ Id=502, ProviderId=5, Name="gpt-3.5-turbo-0613", ShortName="gpt-3.5-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=1.50000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, - new(){ Id=503, ProviderId=5, Name="gpt-3.5-turbo-1106", ShortName="gpt-3.5-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, - new(){ Id=504, ProviderId=5, Name="gpt-3.5-turbo-instruct", ShortName="gpt-3.5-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=1.50000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, - new(){ Id=505, ProviderId=5, Name="gpt-3.5-turbo-0125", ShortName="gpt-3.5-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=0.50000M, OutputTokenPrice1M=1.50000M, CurrencyCode="USD", }, - new(){ Id=506, ProviderId=5, Name="gpt-4-vision-preview", ShortName="gpt-4-vision", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="USD", }, - new(){ Id=507, ProviderId=5, Name="gpt-4-1106-preview", ShortName="gpt-4-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="USD", }, - new(){ Id=508, ProviderId=5, Name="gpt-4-0125-preview", ShortName="gpt-4-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="USD", }, - new(){ Id=509, ProviderId=5, Name="gpt-4-32k", ShortName="gpt-4-32k", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=32768, TokenizerId=1, InputTokenPrice1M=60.00000M, OutputTokenPrice1M=120.00000M, CurrencyCode="USD", }, - new(){ Id=510, ProviderId=5, Name="gpt-4", ShortName="gpt-4", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, 
MaxResponseTokens=8192, TokenizerId=1, InputTokenPrice1M=30.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="USD", }, - new(){ Id=511, ProviderId=5, Name="gpt-4-turbo-2024-04-09", ShortName="gpt-4-turbo", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="USD", }, - new(){ Id=512, ProviderId=5, Name="o1-preview-2024-09-12", ShortName="o1-preview", IsLegacy=false, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=128000, MaxResponseTokens=32768, TokenizerId=2, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="USD", }, - new(){ Id=513, ProviderId=5, Name="o1-mini-2024-09-12", ShortName="o1-mini", IsLegacy=false, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=128000, MaxResponseTokens=65536, TokenizerId=2, InputTokenPrice1M=3.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="USD", }, - new(){ Id=514, ProviderId=5, Name="gpt-4o-mini-2024-07-18", ShortName="gpt-4o", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=16384, TokenizerId=2, InputTokenPrice1M=0.15000M, OutputTokenPrice1M=0.60000M, CurrencyCode="USD", }, - new(){ Id=515, ProviderId=5, Name="gpt-4o-2024-05-13", ShortName="gpt-4o", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=2, InputTokenPrice1M=5.00000M, OutputTokenPrice1M=15.00000M, CurrencyCode="USD", }, - new(){ Id=516, ProviderId=5, Name="gpt-4o-2024-08-06", ShortName="gpt-4o", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=16384, TokenizerId=2, InputTokenPrice1M=2.50000M, OutputTokenPrice1M=10.00000M, CurrencyCode="USD", }, - new(){ Id=517, ProviderId=5, Name="gpt-4o-2024-11-20", ShortName="gpt-4o", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=16384, TokenizerId=2, InputTokenPrice1M=2.50000M, OutputTokenPrice1M=10.00000M, CurrencyCode="USD", }, - new(){ Id=600, ProviderId=6, Name="ERNIE-4.0-Turbo-8K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=30.00000M, OutputTokenPrice1M=90.00000M, CurrencyCode="RMB", }, - new(){ Id=601, ProviderId=6, Name="ERNIE-4.0-8K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=20.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="RMB", }, - new(){ Id=602, ProviderId=6, Name="ERNIE-3.5-8K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, 
AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=0.80000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", }, - new(){ Id=603, ProviderId=6, Name="ERNIE-3.5-128K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.80000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", }, - new(){ Id=604, ProviderId=6, Name="ERNIE-Speed-Pro-128K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.30000M, OutputTokenPrice1M=0.60000M, CurrencyCode="RMB", }, - new(){ Id=605, ProviderId=6, Name="ERNIE-Novel-8K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=40.00000M, OutputTokenPrice1M=120.00000M, CurrencyCode="RMB", }, - new(){ Id=606, ProviderId=6, Name="ERNIE-Speed-128K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, - new(){ Id=607, ProviderId=6, Name="ERNIE-Speed-8K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=1024, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, - new(){ Id=608, ProviderId=6, Name="ERNIE-Lite-128K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, - new(){ Id=609, ProviderId=6, Name="ERNIE-Lite-8K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, - new(){ Id=610, ProviderId=6, Name="ERNIE-Tiny-128K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, - new(){ Id=611, ProviderId=6, Name="ERNIE-Tiny-8K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, - new(){ Id=612, ProviderId=6, Name="ERNIE-Character-Fiction-8K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, 
MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=4.00000M, OutputTokenPrice1M=8.00000M, CurrencyCode="RMB", }, - new(){ Id=613, ProviderId=6, Name="ERNIE-Functions-8K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=4.00000M, OutputTokenPrice1M=8.00000M, CurrencyCode="RMB", }, - new(){ Id=614, ProviderId=6, Name="ERNIE-Lite-Pro-128K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.20000M, OutputTokenPrice1M=0.40000M, CurrencyCode="RMB", }, - new(){ Id=700, ProviderId=7, Name="qwen-max", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=20.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="RMB", }, - new(){ Id=701, ProviderId=7, Name="qwen-plus", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=0.80000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", }, - new(){ Id=702, ProviderId=7, Name="qwen-turbo", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=0.30000M, OutputTokenPrice1M=0.60000M, CurrencyCode="RMB", }, - new(){ Id=703, ProviderId=7, Name="qwen-long", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=10000000, MaxResponseTokens=6000, TokenizerId=null, InputTokenPrice1M=0.50000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", }, - new(){ Id=704, ProviderId=7, Name="qwen-vl-max", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32000, MaxResponseTokens=2000, TokenizerId=null, InputTokenPrice1M=20.00000M, OutputTokenPrice1M=20.00000M, CurrencyCode="RMB", }, - new(){ Id=705, ProviderId=7, Name="qwen-vl-plus", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8000, MaxResponseTokens=2000, TokenizerId=null, InputTokenPrice1M=8.00000M, OutputTokenPrice1M=8.00000M, CurrencyCode="RMB", }, - new(){ Id=706, ProviderId=7, Name="qwen-math-plus", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=3072, TokenizerId=null, InputTokenPrice1M=4.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="RMB", }, - new(){ Id=707, ProviderId=7, Name="qwen-math-turbo", ShortName=null, IsLegacy=false, MinTemperature=0.00M, 
MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=3072, TokenizerId=null, InputTokenPrice1M=2.00000M, OutputTokenPrice1M=6.00000M, CurrencyCode="RMB", }, - new(){ Id=708, ProviderId=7, Name="qwen-coder-turbo", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=2.00000M, OutputTokenPrice1M=6.00000M, CurrencyCode="RMB", }, - new(){ Id=709, ProviderId=7, Name="qwen2.5-72b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=4.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="RMB", }, - new(){ Id=710, ProviderId=7, Name="qwen2.5-32b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=3.50000M, OutputTokenPrice1M=7.00000M, CurrencyCode="RMB", }, - new(){ Id=711, ProviderId=7, Name="qwen2.5-14b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=2.00000M, OutputTokenPrice1M=6.00000M, CurrencyCode="RMB", }, - new(){ Id=712, ProviderId=7, Name="qwen2.5-7b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", }, - new(){ Id=713, ProviderId=7, Name="qwen2.5-3b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, - new(){ Id=714, ProviderId=7, Name="qwen2.5-1.5b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, - new(){ Id=715, ProviderId=7, Name="qwen2.5-0.5b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, - new(){ Id=716, ProviderId=7, Name="qwen2-vl-7b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32000, MaxResponseTokens=2000, TokenizerId=null, InputTokenPrice1M=8.00000M, OutputTokenPrice1M=8.00000M, CurrencyCode="RMB", }, - new(){ Id=717, ProviderId=7, 
Name="qwen2-vl-2b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32000, MaxResponseTokens=2000, TokenizerId=null, InputTokenPrice1M=8.00000M, OutputTokenPrice1M=8.00000M, CurrencyCode="RMB", }, - new(){ Id=718, ProviderId=7, Name="qwen2.5-math-72b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=3072, TokenizerId=null, InputTokenPrice1M=4.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="RMB", }, - new(){ Id=719, ProviderId=7, Name="qwen2.5-math-7b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=3072, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", }, - new(){ Id=720, ProviderId=7, Name="qwen2.5-math-1.5b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=3072, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, - new(){ Id=721, ProviderId=7, Name="qwen2.5-coder-7b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", }, - new(){ Id=722, ProviderId=7, Name="qwen2.5-coder-1.5b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, - new(){ Id=800, ProviderId=8, Name="lite", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, - new(){ Id=801, ProviderId=8, Name="generalv3", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=7.00000M, OutputTokenPrice1M=7.00000M, CurrencyCode="RMB", }, - new(){ Id=802, ProviderId=8, Name="pro-128k", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=13.00000M, OutputTokenPrice1M=13.00000M, CurrencyCode="RMB", }, - new(){ Id=803, ProviderId=8, Name="generalv3.5", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=30.00000M, OutputTokenPrice1M=30.00000M, 
CurrencyCode="RMB", }, - new(){ Id=804, ProviderId=8, Name="max-32k", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=32.00000M, OutputTokenPrice1M=32.00000M, CurrencyCode="RMB", }, - new(){ Id=805, ProviderId=8, Name="4.0Ultra", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=70.00000M, OutputTokenPrice1M=70.00000M, CurrencyCode="RMB", }, - new(){ Id=900, ProviderId=9, Name="glm-4-plus", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=50.00000M, OutputTokenPrice1M=50.00000M, CurrencyCode="RMB", }, - new(){ Id=901, ProviderId=9, Name="glm-4-0520", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=100.00000M, OutputTokenPrice1M=100.00000M, CurrencyCode="RMB", }, - new(){ Id=902, ProviderId=9, Name="glm-4-air", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=1.00000M, CurrencyCode="RMB", }, - new(){ Id=903, ProviderId=9, Name="glm-4-airx", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=10.00000M, CurrencyCode="RMB", }, - new(){ Id=904, ProviderId=9, Name="glm-4-long", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=1048576, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=1.00000M, CurrencyCode="RMB", }, - new(){ Id=905, ProviderId=9, Name="glm-4-flashx", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.10000M, OutputTokenPrice1M=0.10000M, CurrencyCode="RMB", }, - new(){ Id=906, ProviderId=9, Name="glm-4-flash", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, - new(){ Id=907, ProviderId=9, Name="glm-4v-plus", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=1024, TokenizerId=null, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=10.00000M, CurrencyCode="RMB", }, - 
new(){ Id=908, ProviderId=9, Name="glm-4v", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=2048, MaxResponseTokens=1024, TokenizerId=null, InputTokenPrice1M=50.00000M, OutputTokenPrice1M=50.00000M, CurrencyCode="RMB", } + new(){ Id=0, ProviderId=0, Name="Test", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=2048, MaxResponseTokens=2048, TokenizerId=1, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, + new(){ Id=100, ProviderId=1, Name="gpt-35-turbo-0301", ShortName="gpt-35-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=1.50000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, + new(){ Id=101, ProviderId=1, Name="gpt-35-turbo-16k-0613", ShortName="gpt-35-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=16385, MaxResponseTokens=16385, TokenizerId=1, InputTokenPrice1M=3.00000M, OutputTokenPrice1M=4.00000M, CurrencyCode="USD", }, + new(){ Id=102, ProviderId=1, Name="gpt-35-turbo-0613", ShortName="gpt-35-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=1.50000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, + new(){ Id=103, ProviderId=1, Name="gpt-35-turbo-1106", ShortName="gpt-35-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, + new(){ Id=104, ProviderId=1, Name="gpt-35-turbo-instruct", ShortName="gpt-35-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=1.50000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, + new(){ Id=105, ProviderId=1, Name="gpt-35-turbo-0125", ShortName="gpt-35-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=0.50000M, OutputTokenPrice1M=1.50000M, CurrencyCode="USD", }, + new(){ Id=106, ProviderId=1, Name="gpt-4-vision-preview", ShortName="gpt-4-vision", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="USD", }, + new(){ Id=107, ProviderId=1, Name="gpt-4-1106-preview", ShortName="gpt-4-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=1, 
InputTokenPrice1M=10.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="USD", }, + new(){ Id=108, ProviderId=1, Name="gpt-4-0125-preview", ShortName="gpt-4-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="USD", }, + new(){ Id=109, ProviderId=1, Name="gpt-4-32k", ShortName="gpt-4-32k", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=32768, TokenizerId=1, InputTokenPrice1M=60.00000M, OutputTokenPrice1M=120.00000M, CurrencyCode="USD", }, + new(){ Id=110, ProviderId=1, Name="gpt-4", ShortName="gpt-4", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=8192, TokenizerId=1, InputTokenPrice1M=30.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="USD", }, + new(){ Id=111, ProviderId=1, Name="gpt-4-turbo-2024-04-09", ShortName="gpt-4-turbo", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="USD", }, + new(){ Id=112, ProviderId=1, Name="o1-preview-2024-09-12", ShortName="o1-preview", IsLegacy=false, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=128000, MaxResponseTokens=32768, TokenizerId=2, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="USD", }, + new(){ Id=113, ProviderId=1, Name="o1-mini-2024-09-12", ShortName="o1-mini", IsLegacy=false, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=128000, MaxResponseTokens=65536, TokenizerId=2, InputTokenPrice1M=3.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="USD", }, + new(){ Id=114, ProviderId=1, Name="gpt-4o-mini-2024-07-18", ShortName="gpt-4o", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=16384, TokenizerId=2, InputTokenPrice1M=0.15000M, OutputTokenPrice1M=0.60000M, CurrencyCode="USD", }, + new(){ Id=115, ProviderId=1, Name="gpt-4o-2024-05-13", ShortName="gpt-4o", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=2, InputTokenPrice1M=5.00000M, OutputTokenPrice1M=15.00000M, CurrencyCode="USD", }, + new(){ Id=116, ProviderId=1, Name="gpt-4o-2024-08-06", ShortName="gpt-4o", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=16384, TokenizerId=2, InputTokenPrice1M=2.50000M, OutputTokenPrice1M=10.00000M, CurrencyCode="USD", }, + new(){ Id=117, ProviderId=1, Name="gpt-4o-2024-11-20", ShortName="gpt-4o", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, 
AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=16384, TokenizerId=2, InputTokenPrice1M=2.50000M, OutputTokenPrice1M=10.00000M, CurrencyCode="USD", }, + new(){ Id=200, ProviderId=2, Name="hunyuan-turbo", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=50.00000M, CurrencyCode="RMB", }, + new(){ Id=201, ProviderId=2, Name="hunyuan-pro", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=30.00000M, OutputTokenPrice1M=100.00000M, CurrencyCode="RMB", }, + new(){ Id=202, ProviderId=2, Name="hunyuan-standard-256K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=262144, MaxResponseTokens=6144, TokenizerId=null, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="RMB", }, + new(){ Id=203, ProviderId=2, Name="hunyuan-standard", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=4.50000M, OutputTokenPrice1M=5.00000M, CurrencyCode="RMB", }, + new(){ Id=204, ProviderId=2, Name="hunyuan-lite", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=262144, MaxResponseTokens=6144, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, + new(){ Id=205, ProviderId=2, Name="hunyuan-role", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=4.00000M, OutputTokenPrice1M=8.00000M, CurrencyCode="RMB", }, + new(){ Id=206, ProviderId=2, Name="hunyuan-functioncall ", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=4.00000M, OutputTokenPrice1M=8.00000M, CurrencyCode="RMB", }, + new(){ Id=207, ProviderId=2, Name="hunyuan-code", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=4.00000M, OutputTokenPrice1M=8.00000M, CurrencyCode="RMB", }, + new(){ Id=208, ProviderId=2, Name="hunyuan-vision", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=18.00000M, OutputTokenPrice1M=18.00000M, CurrencyCode="RMB", }, + new(){ Id=300, ProviderId=3, Name="yi-lightning", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, 
AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=16384, MaxResponseTokens=16384, TokenizerId=null, InputTokenPrice1M=0.99000M, OutputTokenPrice1M=0.99000M, CurrencyCode="RMB", }, + new(){ Id=301, ProviderId=3, Name="yi-large", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=16384, TokenizerId=null, InputTokenPrice1M=20.00000M, OutputTokenPrice1M=20.00000M, CurrencyCode="RMB", }, + new(){ Id=302, ProviderId=3, Name="yi-medium", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=16384, MaxResponseTokens=16384, TokenizerId=null, InputTokenPrice1M=2.50000M, OutputTokenPrice1M=2.50000M, CurrencyCode="RMB", }, + new(){ Id=303, ProviderId=3, Name="yi-vision", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=16384, MaxResponseTokens=16384, TokenizerId=null, InputTokenPrice1M=6.00000M, OutputTokenPrice1M=6.00000M, CurrencyCode="RMB", }, + new(){ Id=304, ProviderId=3, Name="yi-medium-200k", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=204800, MaxResponseTokens=16384, TokenizerId=null, InputTokenPrice1M=12.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="RMB", }, + new(){ Id=305, ProviderId=3, Name="yi-spark", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=16384, MaxResponseTokens=16384, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=1.00000M, CurrencyCode="RMB", }, + new(){ Id=306, ProviderId=3, Name="yi-large-rag", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=16384, MaxResponseTokens=16384, TokenizerId=null, InputTokenPrice1M=25.00000M, OutputTokenPrice1M=25.00000M, CurrencyCode="RMB", }, + new(){ Id=307, ProviderId=3, Name="yi-large-fc", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=16384, TokenizerId=null, InputTokenPrice1M=20.00000M, OutputTokenPrice1M=20.00000M, CurrencyCode="RMB", }, + new(){ Id=308, ProviderId=3, Name="yi-large-turbo", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=16384, MaxResponseTokens=16384, TokenizerId=null, InputTokenPrice1M=12.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="RMB", }, + new(){ Id=400, ProviderId=4, Name="moonshot-v1-8k", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=12.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="RMB", }, + new(){ Id=401, ProviderId=4, Name="moonshot-v1-32k", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, 
AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=32768, TokenizerId=null, InputTokenPrice1M=24.00000M, OutputTokenPrice1M=24.00000M, CurrencyCode="RMB", }, + new(){ Id=402, ProviderId=4, Name="moonshot-v1-128k", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=131072, TokenizerId=null, InputTokenPrice1M=60.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="RMB", }, + new(){ Id=500, ProviderId=5, Name="gpt-3.5-turbo-0301", ShortName="gpt-3.5-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=1.50000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, + new(){ Id=501, ProviderId=5, Name="gpt-3.5-turbo-16k-0613", ShortName="gpt-3.5-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=16385, MaxResponseTokens=16385, TokenizerId=1, InputTokenPrice1M=3.00000M, OutputTokenPrice1M=4.00000M, CurrencyCode="USD", }, + new(){ Id=502, ProviderId=5, Name="gpt-3.5-turbo-0613", ShortName="gpt-3.5-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=1.50000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, + new(){ Id=503, ProviderId=5, Name="gpt-3.5-turbo-1106", ShortName="gpt-3.5-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, + new(){ Id=504, ProviderId=5, Name="gpt-3.5-turbo-instruct", ShortName="gpt-3.5-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=1.50000M, OutputTokenPrice1M=2.00000M, CurrencyCode="USD", }, + new(){ Id=505, ProviderId=5, Name="gpt-3.5-turbo-0125", ShortName="gpt-3.5-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=0.50000M, OutputTokenPrice1M=1.50000M, CurrencyCode="USD", }, + new(){ Id=506, ProviderId=5, Name="gpt-4-vision-preview", ShortName="gpt-4-vision", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="USD", }, + new(){ Id=507, ProviderId=5, Name="gpt-4-1106-preview", ShortName="gpt-4-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="USD", }, + new(){ Id=508, ProviderId=5, Name="gpt-4-0125-preview", 
ShortName="gpt-4-turbo", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="USD", }, + new(){ Id=509, ProviderId=5, Name="gpt-4-32k", ShortName="gpt-4-32k", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=32768, TokenizerId=1, InputTokenPrice1M=60.00000M, OutputTokenPrice1M=120.00000M, CurrencyCode="USD", }, + new(){ Id=510, ProviderId=5, Name="gpt-4", ShortName="gpt-4", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=8192, TokenizerId=1, InputTokenPrice1M=30.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="USD", }, + new(){ Id=511, ProviderId=5, Name="gpt-4-turbo-2024-04-09", ShortName="gpt-4-turbo", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=1, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="USD", }, + new(){ Id=512, ProviderId=5, Name="o1-preview-2024-09-12", ShortName="o1-preview", IsLegacy=false, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=128000, MaxResponseTokens=32768, TokenizerId=2, InputTokenPrice1M=15.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="USD", }, + new(){ Id=513, ProviderId=5, Name="o1-mini-2024-09-12", ShortName="o1-mini", IsLegacy=false, MinTemperature=1.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=false, AllowStreaming=false, ContextWindow=128000, MaxResponseTokens=65536, TokenizerId=2, InputTokenPrice1M=3.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="USD", }, + new(){ Id=514, ProviderId=5, Name="gpt-4o-mini-2024-07-18", ShortName="gpt-4o", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=16384, TokenizerId=2, InputTokenPrice1M=0.15000M, OutputTokenPrice1M=0.60000M, CurrencyCode="USD", }, + new(){ Id=515, ProviderId=5, Name="gpt-4o-2024-05-13", ShortName="gpt-4o", IsLegacy=true, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=4096, TokenizerId=2, InputTokenPrice1M=5.00000M, OutputTokenPrice1M=15.00000M, CurrencyCode="USD", }, + new(){ Id=516, ProviderId=5, Name="gpt-4o-2024-08-06", ShortName="gpt-4o", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=16384, TokenizerId=2, InputTokenPrice1M=2.50000M, OutputTokenPrice1M=10.00000M, CurrencyCode="USD", }, + new(){ Id=517, ProviderId=5, Name="gpt-4o-2024-11-20", ShortName="gpt-4o", IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=128000, MaxResponseTokens=16384, TokenizerId=2, InputTokenPrice1M=2.50000M, OutputTokenPrice1M=10.00000M, 
CurrencyCode="USD", }, + new(){ Id=600, ProviderId=6, Name="ERNIE-4.0-Turbo-8K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=30.00000M, OutputTokenPrice1M=90.00000M, CurrencyCode="RMB", }, + new(){ Id=601, ProviderId=6, Name="ERNIE-4.0-8K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=20.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="RMB", }, + new(){ Id=602, ProviderId=6, Name="ERNIE-3.5-8K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=0.80000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", }, + new(){ Id=603, ProviderId=6, Name="ERNIE-3.5-128K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.80000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", }, + new(){ Id=604, ProviderId=6, Name="ERNIE-Speed-Pro-128K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.30000M, OutputTokenPrice1M=0.60000M, CurrencyCode="RMB", }, + new(){ Id=605, ProviderId=6, Name="ERNIE-Novel-8K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=40.00000M, OutputTokenPrice1M=120.00000M, CurrencyCode="RMB", }, + new(){ Id=606, ProviderId=6, Name="ERNIE-Speed-128K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, + new(){ Id=607, ProviderId=6, Name="ERNIE-Speed-8K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=1024, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, + new(){ Id=608, ProviderId=6, Name="ERNIE-Lite-128K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, + new(){ Id=609, ProviderId=6, Name="ERNIE-Lite-8K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.00000M, 
OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, + new(){ Id=610, ProviderId=6, Name="ERNIE-Tiny-128K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, + new(){ Id=611, ProviderId=6, Name="ERNIE-Tiny-8K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, + new(){ Id=612, ProviderId=6, Name="ERNIE-Character-Fiction-8K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=4.00000M, OutputTokenPrice1M=8.00000M, CurrencyCode="RMB", }, + new(){ Id=613, ProviderId=6, Name="ERNIE-Functions-8K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=2048, TokenizerId=null, InputTokenPrice1M=4.00000M, OutputTokenPrice1M=8.00000M, CurrencyCode="RMB", }, + new(){ Id=614, ProviderId=6, Name="ERNIE-Lite-Pro-128K", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.20000M, OutputTokenPrice1M=0.40000M, CurrencyCode="RMB", }, + new(){ Id=700, ProviderId=7, Name="qwen-max", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=20.00000M, OutputTokenPrice1M=60.00000M, CurrencyCode="RMB", }, + new(){ Id=701, ProviderId=7, Name="qwen-plus", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=0.80000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", }, + new(){ Id=702, ProviderId=7, Name="qwen-turbo", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=0.30000M, OutputTokenPrice1M=0.60000M, CurrencyCode="RMB", }, + new(){ Id=703, ProviderId=7, Name="qwen-long", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=10000000, MaxResponseTokens=6000, TokenizerId=null, InputTokenPrice1M=0.50000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", }, + new(){ Id=704, ProviderId=7, Name="qwen-vl-max", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32000, MaxResponseTokens=2000, TokenizerId=null, 
InputTokenPrice1M=20.00000M, OutputTokenPrice1M=20.00000M, CurrencyCode="RMB", }, + new(){ Id=705, ProviderId=7, Name="qwen-vl-plus", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8000, MaxResponseTokens=2000, TokenizerId=null, InputTokenPrice1M=8.00000M, OutputTokenPrice1M=8.00000M, CurrencyCode="RMB", }, + new(){ Id=706, ProviderId=7, Name="qwen-math-plus", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=3072, TokenizerId=null, InputTokenPrice1M=4.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="RMB", }, + new(){ Id=707, ProviderId=7, Name="qwen-math-turbo", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=3072, TokenizerId=null, InputTokenPrice1M=2.00000M, OutputTokenPrice1M=6.00000M, CurrencyCode="RMB", }, + new(){ Id=708, ProviderId=7, Name="qwen-coder-turbo", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=2.00000M, OutputTokenPrice1M=6.00000M, CurrencyCode="RMB", }, + new(){ Id=709, ProviderId=7, Name="qwen2.5-72b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=4.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="RMB", }, + new(){ Id=710, ProviderId=7, Name="qwen2.5-32b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=3.50000M, OutputTokenPrice1M=7.00000M, CurrencyCode="RMB", }, + new(){ Id=711, ProviderId=7, Name="qwen2.5-14b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=2.00000M, OutputTokenPrice1M=6.00000M, CurrencyCode="RMB", }, + new(){ Id=712, ProviderId=7, Name="qwen2.5-7b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", }, + new(){ Id=713, ProviderId=7, Name="qwen2.5-3b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, + new(){ Id=714, ProviderId=7, Name="qwen2.5-1.5b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, 
ContextWindow=32768, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, + new(){ Id=715, ProviderId=7, Name="qwen2.5-0.5b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, + new(){ Id=716, ProviderId=7, Name="qwen2-vl-7b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32000, MaxResponseTokens=2000, TokenizerId=null, InputTokenPrice1M=8.00000M, OutputTokenPrice1M=8.00000M, CurrencyCode="RMB", }, + new(){ Id=717, ProviderId=7, Name="qwen2-vl-2b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32000, MaxResponseTokens=2000, TokenizerId=null, InputTokenPrice1M=8.00000M, OutputTokenPrice1M=8.00000M, CurrencyCode="RMB", }, + new(){ Id=718, ProviderId=7, Name="qwen2.5-math-72b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=3072, TokenizerId=null, InputTokenPrice1M=4.00000M, OutputTokenPrice1M=12.00000M, CurrencyCode="RMB", }, + new(){ Id=719, ProviderId=7, Name="qwen2.5-math-7b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=3072, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", }, + new(){ Id=720, ProviderId=7, Name="qwen2.5-math-1.5b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=3072, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, + new(){ Id=721, ProviderId=7, Name="qwen2.5-coder-7b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", }, + new(){ Id=722, ProviderId=7, Name="qwen2.5-coder-1.5b-instruct", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.99M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, + new(){ Id=800, ProviderId=8, Name="lite", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=4096, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", }, + new(){ Id=801, ProviderId=8, Name="generalv3", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, 
AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=7.00000M, OutputTokenPrice1M=7.00000M, CurrencyCode="RMB", }, + new(){ Id=802, ProviderId=8, Name="pro-128k", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=13.00000M, OutputTokenPrice1M=13.00000M, CurrencyCode="RMB", }, + new(){ Id=803, ProviderId=8, Name="generalv3.5", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=30.00000M, OutputTokenPrice1M=30.00000M, CurrencyCode="RMB", }, + new(){ Id=804, ProviderId=8, Name="max-32k", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=32768, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=32.00000M, OutputTokenPrice1M=32.00000M, CurrencyCode="RMB", }, + new(){ Id=805, ProviderId=8, Name="4.0Ultra", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=8192, TokenizerId=null, InputTokenPrice1M=70.00000M, OutputTokenPrice1M=70.00000M, CurrencyCode="RMB", }, + new(){ Id=900, ProviderId=9, Name="glm-4-plus", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=50.00000M, OutputTokenPrice1M=50.00000M, CurrencyCode="RMB", }, + new(){ Id=901, ProviderId=9, Name="glm-4-0520", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=100.00000M, OutputTokenPrice1M=100.00000M, CurrencyCode="RMB", }, + new(){ Id=902, ProviderId=9, Name="glm-4-air", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=1.00000M, CurrencyCode="RMB", }, + new(){ Id=903, ProviderId=9, Name="glm-4-airx", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=10.00000M, CurrencyCode="RMB", }, + new(){ Id=904, ProviderId=9, Name="glm-4-long", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=1048576, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=1.00000M, CurrencyCode="RMB", }, + new(){ Id=905, ProviderId=9, Name="glm-4-flashx", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, 
AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.10000M, OutputTokenPrice1M=0.10000M, CurrencyCode="RMB", },
+            new(){ Id=906, ProviderId=9, Name="glm-4-flash", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=true, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=131072, MaxResponseTokens=4096, TokenizerId=null, InputTokenPrice1M=0.00000M, OutputTokenPrice1M=0.00000M, CurrencyCode="RMB", },
+            new(){ Id=907, ProviderId=9, Name="glm-4v-plus", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=8192, MaxResponseTokens=1024, TokenizerId=null, InputTokenPrice1M=10.00000M, OutputTokenPrice1M=10.00000M, CurrencyCode="RMB", },
+            new(){ Id=908, ProviderId=9, Name="glm-4v", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=1.00M, AllowSearch=false, AllowVision=true, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=2048, MaxResponseTokens=1024, TokenizerId=null, InputTokenPrice1M=50.00000M, OutputTokenPrice1M=50.00000M, CurrencyCode="RMB", },
+            new(){ Id=1000, ProviderId=10, Name="deepseek-chat", ShortName=null, IsLegacy=false, MinTemperature=0.00M, MaxTemperature=2.00M, AllowSearch=false, AllowVision=false, AllowSystemPrompt=true, AllowStreaming=true, ContextWindow=64000, MaxResponseTokens=4000, TokenizerId=null, InputTokenPrice1M=1.00000M, OutputTokenPrice1M=2.00000M, CurrencyCode="RMB", }
         ]);
     }
     private static void InsertModelProviders(ChatsDB db)
     {
-        // Generated from data, hash: ca38a28a3e95fcb5fcd96fa2a35db6a86ee7d356ac7e2a20b8d7d9b52b895abd
+        // Generated from data, hash: 137fb94c0828ca100273aad14b6d69e7f9caef6a1d69cc96c2667a17a4e31ccc
         db.ModelProviders.AddRange(
         [
-            new(){ Id=0, Name="Test", InitialHost=null, InitialSecret=null, },
-            new(){ Id=1, Name="Azure OpenAI", InitialHost="https://.openai.azure.com/", InitialSecret="", },
-            new(){ Id=2, Name="Tencent Hunyuan", InitialHost="hunyuan.tencentcloudapi.com", InitialSecret="""{"secretId":"", "secretKey":""}""", },
-            new(){ Id=3, Name="01.ai", InitialHost=null, InitialSecret="", },
-            new(){ Id=4, Name="Moonshot", InitialHost=null, InitialSecret="", },
-            new(){ Id=5, Name="OpenAI", InitialHost="https://api.openai.com/v1", InitialSecret="", },
-            new(){ Id=6, Name="Wenxin Qianfan", InitialHost=null, InitialSecret="""{"apiKey":"", "secret":""}""", },
-            new(){ Id=7, Name="DashScope", InitialHost=null, InitialSecret="", },
-            new(){ Id=8, Name="Xunfei SparkDesk", InitialHost=null, InitialSecret="""{"appId": "", "apiKey":"", "secret":""}""", },
-            new(){ Id=9, Name="Zhipu AI", InitialHost=null, InitialSecret="", }
+            new(){ Id=0, Name="Test", InitialHost=null, InitialSecret=null, },
+            new(){ Id=1, Name="Azure OpenAI", InitialHost="https://.openai.azure.com/", InitialSecret="", },
+            new(){ Id=2, Name="Tencent Hunyuan", InitialHost="hunyuan.tencentcloudapi.com", InitialSecret="""{"secretId":"", "secretKey":""}""", },
+            new(){ Id=3, Name="01.ai", InitialHost=null, InitialSecret="", },
+            new(){ Id=4, Name="Moonshot", InitialHost=null, InitialSecret="", },
+            new(){ Id=5, Name="OpenAI", InitialHost="https://api.openai.com/v1", InitialSecret="", },
+            new(){ Id=6, Name="Wenxin Qianfan", InitialHost=null, InitialSecret="""{"apiKey":"", "secret":""}""", },
+            new(){ Id=7, Name="DashScope", InitialHost=null, InitialSecret="", },
+            new(){ Id=8, Name="Xunfei SparkDesk", InitialHost=null, InitialSecret="""{"appId": "", "apiKey":"", "secret":""}""", },
+            new(){ Id=9, Name="Zhipu AI", InitialHost=null, InitialSecret="", },
+            new(){ Id=10, Name="DeepSeek", InitialHost=null, InitialSecret="", }
         ]);
     }
 };
\ No newline at end of file
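The InputTokenPrice1M and OutputTokenPrice1M columns seeded above are per-million-token prices expressed in the row's CurrencyCode, so the charge for one request is inputTokens * InputTokenPrice1M / 1,000,000 plus outputTokens * OutputTokenPrice1M / 1,000,000. A minimal sketch of that arithmetic follows; the EstimateCost helper and the sample token counts are illustrative only, not code from this repository:

    using System;

    // Illustrative only: turns the seeded per-1M-token prices into the cost of a single request.
    static decimal EstimateCost(int inputTokens, int outputTokens,
                                decimal inputTokenPrice1M, decimal outputTokenPrice1M) =>
        inputTokens * inputTokenPrice1M / 1_000_000m +
        outputTokens * outputTokenPrice1M / 1_000_000m;

    // Example with the gpt-4o-2024-11-20 prices seeded above (2.50 / 10.00 USD per 1M tokens):
    // 1200 * 2.50 / 1e6 + 300 * 10.00 / 1e6 = 0.003 + 0.003 = 0.006 USD
    Console.WriteLine(EstimateCost(1200, 300, 2.50m, 10.00m));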