diff --git a/src/Cellm.Tests/IntegrationTests.cs b/src/Cellm.Tests/IntegrationTests.cs
index 7f9c2b6..637a03e 100644
--- a/src/Cellm.Tests/IntegrationTests.cs
+++ b/src/Cellm.Tests/IntegrationTests.cs
@@ -4,7 +4,7 @@
 
 namespace Cellm.Tests;
 
-[ExcelTestSettings(AddIn = @"..\..\..\..\Cellm\bin\Debug\net6.0-windows\Cellm-AddIn")]
+[ExcelTestSettings(AddIn = @"..\..\..\..\Cellm\bin\Debug\net8.0-windows\Cellm-AddIn")]
 public class ExcelTests : IDisposable
 {
     readonly Workbook _testWorkbook;
@@ -54,17 +54,17 @@ public void TestPromptWith()
         Worksheet ws = (Worksheet)_testWorkbook.Sheets[1];
         ws.Range["A1"].Value = "Respond with \"Hello World\"";
         ws.Range["A2"].Formula = "=PROMPTWITH(\"Anthropic/claude-3-haiku-20240307\",A1)";
-        ExcelTestHelper.WaitForCellValue(ws.Range["A2"]);
+        Automation.Wait(5000);
         Assert.Equal("Hello World", ws.Range["A2"].Text);
 
         ws.Range["B1"].Value = "Respond with \"Hello World\"";
         ws.Range["B2"].Formula = "=PROMPTWITH(\"OpenAI/gpt-4o-mini\",B1)";
-        ExcelTestHelper.WaitForCellValue(ws.Range["B2"]);
+        Automation.Wait(5000);
         Assert.Equal("Hello World", ws.Range["B2"].Text);
 
         ws.Range["C1"].Value = "Respond with \"Hello World\"";
         ws.Range["C2"].Formula = "=PROMPTWITH(\"OpenAI/gemini-1.5-flash-latest\",C1)";
-        ExcelTestHelper.WaitForCellValue(ws.Range["C2"]);
+        Automation.Wait(5000);
         Assert.Equal("Hello World", ws.Range["C2"].Text);
     }
 }
diff --git a/src/Cellm/AddIn/PromptArgumentParser.cs b/src/Cellm/AddIn/ArgumentParser.cs
similarity index 87%
rename from src/Cellm/AddIn/PromptArgumentParser.cs
rename to src/Cellm/AddIn/ArgumentParser.cs
index c216593..8684adf 100644
--- a/src/Cellm/AddIn/PromptArgumentParser.cs
+++ b/src/Cellm/AddIn/ArgumentParser.cs
@@ -1,7 +1,6 @@
 using System.Text;
 using Cellm.AddIn.Exceptions;
-using Cellm.Prompts;
-using Cellm.Services.Configuration;
+using Cellm.Models.Providers;
 using ExcelDna.Integration;
 using Microsoft.Extensions.Configuration;
 using Microsoft.Office.Interop.Excel;
@@ -10,7 +9,7 @@ namespace Cellm.AddIn;
 
 public record Arguments(string Provider, string Model, string Context, string Instructions, double Temperature);
 
-public class PromptArgumentParser
+public class ArgumentParser
 {
     private string? _provider;
     private string? _model;
@@ -20,12 +19,12 @@ public class PromptArgumentParser
 
     private readonly IConfiguration _configuration;
 
-    public PromptArgumentParser(IConfiguration configuration)
+    public ArgumentParser(IConfiguration configuration)
     {
         _configuration = configuration;
     }
 
-    public PromptArgumentParser AddProvider(object providerAndModel)
+    public ArgumentParser AddProvider(object providerAndModel)
     {
         _provider = providerAndModel switch
         {
@@ -37,7 +36,7 @@ public PromptArgumentParser AddProvider(object providerAndModel)
 
         return this;
     }
 
-    public PromptArgumentParser AddModel(object providerAndModel)
+    public ArgumentParser AddModel(object providerAndModel)
     {
         _model = providerAndModel switch
         {
@@ -49,21 +48,21 @@ public PromptArgumentParser AddModel(object providerAndModel)
 
         return this;
     }
 
-    public PromptArgumentParser AddInstructionsOrContext(object instructionsOrContext)
+    public ArgumentParser AddInstructionsOrContext(object instructionsOrContext)
    {
         _instructionsOrContext = instructionsOrContext;
 
         return this;
     }
 
-    public PromptArgumentParser AddInstructionsOrTemperature(object instructionsOrTemperature)
+    public ArgumentParser AddInstructionsOrTemperature(object instructionsOrTemperature)
     {
         _instructionsOrTemperature = instructionsOrTemperature;
 
         return this;
     }
 
-    public PromptArgumentParser AddTemperature(object temperature)
+    public ArgumentParser AddTemperature(object temperature)
     {
         _temperature = temperature;
 
@@ -73,19 +72,19 @@ public PromptArgumentParser AddTemperature(object temperature)
     public Arguments Parse()
     {
         var provider = _provider ?? _configuration
-            .GetSection(nameof(CellmConfiguration))
-            .GetValue<string>(nameof(CellmConfiguration.DefaultProvider))
-            ?? throw new ArgumentException(nameof(CellmConfiguration.DefaultProvider));
+            .GetSection(nameof(ProviderConfiguration))
+            .GetValue<string>(nameof(ProviderConfiguration.DefaultProvider))
+            ?? throw new ArgumentException(nameof(ProviderConfiguration.DefaultProvider));
 
         var model = _model ?? _configuration
-            .GetSection(nameof(CellmConfiguration))
-            .GetValue<string>(nameof(CellmConfiguration.DefaultModel))
-            ?? throw new ArgumentException(nameof(CellmConfiguration.DefaultModel));
+            .GetSection($"{provider}Configuration")
+            .GetValue<string>(nameof(IProviderConfiguration.DefaultModel))
+            ?? throw new ArgumentException(nameof(IProviderConfiguration.DefaultModel));
 
         var defaultTemperature = _configuration
-            .GetSection(nameof(CellmConfiguration))
-            .GetValue<double?>(nameof(CellmConfiguration.DefaultTemperature))
-            ?? throw new ArgumentException(nameof(CellmConfiguration.DefaultTemperature));
+            .GetSection(nameof(ProviderConfiguration))
+            .GetValue<double?>(nameof(ProviderConfiguration.DefaultTemperature))
+            ?? throw new ArgumentException(nameof(ProviderConfiguration.DefaultTemperature));
 
         return (_instructionsOrContext, _instructionsOrTemperature, _temperature) switch
         {
diff --git a/src/Cellm/AddIn/CellmAddIn.cs b/src/Cellm/AddIn/ExcelAddin.cs
similarity index 92%
rename from src/Cellm/AddIn/CellmAddIn.cs
rename to src/Cellm/AddIn/ExcelAddin.cs
index 589bd5f..a4bf618 100644
--- a/src/Cellm/AddIn/CellmAddIn.cs
+++ b/src/Cellm/AddIn/ExcelAddin.cs
@@ -3,7 +3,7 @@
 
 namespace Cellm.AddIn;
 
-public class CellmAddIn : IExcelAddIn
+public class ExcelAddin : IExcelAddIn
 {
     public void AutoOpen()
     {
diff --git a/src/Cellm/AddIn/CellmFunctions.cs b/src/Cellm/AddIn/ExcelFunctions.cs
similarity index 92%
rename from src/Cellm/AddIn/CellmFunctions.cs
rename to src/Cellm/AddIn/ExcelFunctions.cs
index 85b2500..fe50294 100644
--- a/src/Cellm/AddIn/CellmFunctions.cs
+++ b/src/Cellm/AddIn/ExcelFunctions.cs
@@ -2,15 +2,15 @@
 using System.Text;
 using Cellm.AddIn.Exceptions;
 using Cellm.Models;
-using Cellm.Prompts;
+using Cellm.Models.Prompts;
+using Cellm.Models.Providers;
 using Cellm.Services;
-using Cellm.Services.Configuration;
 using ExcelDna.Integration;
 using Microsoft.Extensions.Configuration;
 
 namespace Cellm.AddIn;
 
-public static class CellmFunctions
+public static class ExcelFunctions
 {
     /// <summary>
     /// Sends a prompt to the default model configured in CellmConfiguration.
     /// </summary>
@@ -35,8 +35,8 @@ public static object Prompt(
     {
         var configuration = ServiceLocator.Get<IConfiguration>();
 
-        var provider = configuration.GetSection(nameof(CellmConfiguration)).GetValue<string>(nameof(CellmConfiguration.DefaultProvider))
-            ?? throw new ArgumentException(nameof(CellmConfiguration.DefaultProvider));
+        var provider = configuration.GetSection(nameof(ProviderConfiguration)).GetValue<string>(nameof(ProviderConfiguration.DefaultProvider))
+            ?? throw new ArgumentException(nameof(ProviderConfiguration.DefaultProvider));
 
         var model = configuration.GetSection($"{provider}Configuration").GetValue<string>(nameof(IProviderConfiguration.DefaultModel))
             ?? throw new ArgumentException(nameof(IProviderConfiguration.DefaultModel));
@@ -73,7 +73,7 @@ public static object PromptWith(
     {
         try
         {
-            var arguments = ServiceLocator.Get<PromptArgumentParser>()
+            var arguments = ServiceLocator.Get<ArgumentParser>()
                 .AddProvider(providerAndModel)
                 .AddModel(providerAndModel)
                 .AddInstructionsOrContext(instructionsOrContext)
@@ -116,7 +116,7 @@ public static object PromptWith(
     /// <returns>A task that represents the asynchronous operation. The task result contains the model's response as a string.</returns>
     /// <exception cref="CellmException">Thrown when an unexpected error occurs during the operation.</exception>
-    private static async Task<string> CallModelAsync(Prompt prompt, string? provider = null, Uri? baseAddress = null)
+    internal static async Task<string> CallModelAsync(Prompt prompt, string? provider = null, Uri? baseAddress = null)
     {
         var client = ServiceLocator.Get<Client>();
         var response = await client.Send(prompt, provider, baseAddress, CancellationToken.None);
diff --git a/src/Cellm/Prompts/SystemMessages.cs b/src/Cellm/AddIn/SystemMessages.cs
similarity index 94%
rename from src/Cellm/Prompts/SystemMessages.cs
rename to src/Cellm/AddIn/SystemMessages.cs
index bedcad4..971fbb3 100644
--- a/src/Cellm/Prompts/SystemMessages.cs
+++ b/src/Cellm/AddIn/SystemMessages.cs
@@ -1,4 +1,4 @@
-namespace Cellm.Prompts;
+namespace Cellm.AddIn;
 
 internal static class SystemMessages
 {
diff --git a/src/Cellm/Cellm.csproj b/src/Cellm/Cellm.csproj
index 0164082..9123a11 100644
--- a/src/Cellm/Cellm.csproj
+++ b/src/Cellm/Cellm.csproj
@@ -27,6 +27,7 @@
+
diff --git a/src/Cellm/Models/Anthropic/AnthropicResponse.cs b/src/Cellm/Models/Anthropic/AnthropicResponse.cs
deleted file mode 100644
index 78dbc41..0000000
--- a/src/Cellm/Models/Anthropic/AnthropicResponse.cs
+++ /dev/null
@@ -1,5 +0,0 @@
-using Cellm.Prompts;
-
-namespace Cellm.Models.Anthropic;
-
-internal record AnthropicResponse(Prompt Prompt) : IModelResponse;
\ No newline at end of file
diff --git a/src/Cellm/Models/ModelRequestBehavior/CachingBehavior.cs b/src/Cellm/Models/Behaviors/CacheBehavior.cs
similarity index 66%
rename from src/Cellm/Models/ModelRequestBehavior/CachingBehavior.cs
rename to src/Cellm/Models/Behaviors/CacheBehavior.cs
index 5712f77..6dded88 100644
--- a/src/Cellm/Models/ModelRequestBehavior/CachingBehavior.cs
+++ b/src/Cellm/Models/Behaviors/CacheBehavior.cs
@@ -1,23 +1,23 @@
 using System.Text.Json;
-using Cellm.Services.Configuration;
+using Cellm.Models.Providers;
 using MediatR;
 using Microsoft.Extensions.Caching.Hybrid;
 using Microsoft.Extensions.Options;
 
-namespace Cellm.Models.ModelRequestBehavior;
+namespace Cellm.Models.Behaviors;
 
-internal class CachingBehavior<TRequest, TResponse>(HybridCache cache, IOptions<CellmConfiguration> cellmConfiguration) : IPipelineBehavior<TRequest, TResponse>
+internal class CacheBehavior<TRequest, TResponse>(HybridCache cache, IOptions<ProviderConfiguration> providerConfiguration) : IPipelineBehavior<TRequest, TResponse>
     where TRequest : IModelRequest<TResponse>
     where TResponse : IModelResponse
 {
     private readonly HybridCacheEntryOptions _cacheEntryOptions = new()
     {
-        Expiration = TimeSpan.FromSeconds(cellmConfiguration.Value.CacheTimeoutInSeconds)
+        Expiration = TimeSpan.FromSeconds(providerConfiguration.Value.CacheTimeoutInSeconds)
     };
 
     public async Task<TResponse> Handle(TRequest request, RequestHandlerDelegate<TResponse> next, CancellationToken cancellationToken)
     {
-        if (cellmConfiguration.Value.EnableCache)
+        if (providerConfiguration.Value.EnableCache)
         {
             return await cache.GetOrCreateAsync(
                 JsonSerializer.Serialize(request.Prompt),
diff --git a/src/Cellm/Models/ModelRequestBehavior/SentryBehavior.cs b/src/Cellm/Models/Behaviors/SentryBehavior.cs
similarity index 93%
rename from src/Cellm/Models/ModelRequestBehavior/SentryBehavior.cs
rename to src/Cellm/Models/Behaviors/SentryBehavior.cs
index 5ee3915..0f80547 100644
--- a/src/Cellm/Models/ModelRequestBehavior/SentryBehavior.cs
+++ b/src/Cellm/Models/Behaviors/SentryBehavior.cs
@@ -1,6 +1,6 @@
 using MediatR;
 
-namespace Cellm.Models.ModelRequestBehavior;
+namespace Cellm.Models.Behaviors;
 
 internal class SentryBehavior<TRequest, TResponse> : IPipelineBehavior<TRequest, TResponse>
     where TRequest : notnull
diff --git a/src/Cellm/Models/Behaviors/ToolBehavior.cs b/src/Cellm/Models/Behaviors/ToolBehavior.cs
new file mode 100644
index 0000000..9d19b56
--- /dev/null
+++ b/src/Cellm/Models/Behaviors/ToolBehavior.cs
@@ -0,0 +1,24 @@
+using Cellm.Models.Behaviors;
+using Cellm.Models.Providers;
+using MediatR;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.Options;
+
+namespace Cellm.Models.Tools;
+
+internal class ToolBehavior<TRequest, TResponse>(IOptions<ProviderConfiguration> providerConfiguration, IEnumerable<AIFunction> functions)
+    : IPipelineBehavior<TRequest, TResponse> where TRequest : IModelRequest<TResponse>
+{
+    private readonly ProviderConfiguration _providerConfiguration = providerConfiguration.Value;
+    private readonly List<AITool> _tools = new(functions);
+
+    public async Task<TResponse> Handle(TRequest request, RequestHandlerDelegate<TResponse> next, CancellationToken cancellationToken)
+    {
+        if (_providerConfiguration.EnableTools)
+        {
+            request.Prompt.Options.Tools = _tools;
+        }
+
+        return await next();
+    }
+}
diff --git a/src/Cellm/Models/Client.cs b/src/Cellm/Models/Client.cs
index d70aabe..feffdcb 100644
--- a/src/Cellm/Models/Client.cs
+++ b/src/Cellm/Models/Client.cs
@@ -1,40 +1,39 @@
-using System.Text.Json;
-using Cellm.AddIn.Exceptions;
-using Cellm.Models.Anthropic;
-using Cellm.Models.Llamafile;
-using Cellm.Models.Ollama;
-using Cellm.Models.OpenAi;
-using Cellm.Models.OpenAiCompatible;
-using Cellm.Prompts;
-using Cellm.Services.Configuration;
+using Cellm.Models.Exceptions;
+using Cellm.Models.Prompts;
+using Cellm.Models.Providers;
+using Cellm.Models.Providers.Anthropic;
+using Cellm.Models.Providers.Llamafile;
+using Cellm.Models.Providers.Ollama;
+using Cellm.Models.Providers.OpenAi;
+using Cellm.Models.Providers.OpenAiCompatible;
 using MediatR;
 using Microsoft.Extensions.Options;
 using Polly.Timeout;
 
 namespace Cellm.Models;
 
-internal class Client(ISender sender, IOptions<CellmConfiguration> cellmConfiguration)
+public class Client(ISender sender, IOptions<ProviderConfiguration> providerConfiguration)
 {
-    private readonly CellmConfiguration _cellmConfiguration = cellmConfiguration.Value;
+    private readonly ProviderConfiguration _providerConfiguration = providerConfiguration.Value;
 
     public async Task<Prompt> Send(Prompt prompt, string? provider, Uri? baseAddress, CancellationToken cancellationToken)
     {
         try
         {
-            provider ??= _cellmConfiguration.DefaultProvider;
+            provider ??= _providerConfiguration.DefaultProvider;
 
-            if (!Enum.TryParse<Providers>(provider, true, out var parsedProvider))
+            if (!Enum.TryParse<Provider>(provider, true, out var parsedProvider))
             {
                 throw new ArgumentException($"Unsupported provider: {provider}");
             }
 
             IModelResponse response = parsedProvider switch
             {
-                Providers.Anthropic => await sender.Send(new AnthropicRequest(prompt, provider, baseAddress), cancellationToken),
-                Providers.Llamafile => await sender.Send(new LlamafileRequest(prompt), cancellationToken),
-                Providers.Ollama => await sender.Send(new OllamaRequest(prompt), cancellationToken),
-                Providers.OpenAi => await sender.Send(new OpenAiRequest(prompt), cancellationToken),
-                Providers.OpenAiCompatible => await sender.Send(new OpenAiCompatibleRequest(prompt, baseAddress), cancellationToken),
+                Provider.Anthropic => await sender.Send(new AnthropicRequest(prompt, provider, baseAddress), cancellationToken),
+                Provider.Llamafile => await sender.Send(new LlamafileRequest(prompt), cancellationToken),
+                Provider.Ollama => await sender.Send(new OllamaRequest(prompt), cancellationToken),
+                Provider.OpenAi => await sender.Send(new OpenAiRequest(prompt), cancellationToken),
+                Provider.OpenAiCompatible => await sender.Send(new OpenAiCompatibleRequest(prompt, baseAddress), cancellationToken),
                 _ => throw new InvalidOperationException($"Provider {parsedProvider} is defined but not implemented")
             };
 
@@ -42,32 +41,20 @@ public async Task<Prompt> Send(Prompt prompt, string? provider, Uri? baseAddress
         }
         catch (HttpRequestException ex)
         {
-            throw new CellmException($"HTTP request failed: {ex.Message}", ex);
-        }
-        catch (JsonException ex)
-        {
-            throw new CellmException($"JSON processing failed: {ex.Message}", ex);
-        }
-        catch (NotSupportedException ex)
-        {
-            throw new CellmException($"Method not supported: {ex.Message}", ex);
-        }
-        catch (FileReaderException ex)
-        {
-            throw new CellmException($"File could not be read: {ex.Message}", ex);
+            throw new CellmModelException($"HTTP request failed: {ex.Message}", ex);
         }
         catch (NullReferenceException ex)
         {
-            throw new CellmException($"Null reference error: {ex.Message}", ex);
+            throw new CellmModelException($"Null reference error: {ex.Message}", ex);
         }
         catch (TimeoutRejectedException ex)
         {
-            throw new CellmException($"Request timed out: {ex.Message}", ex);
+            throw new CellmModelException($"Request timed out: {ex.Message}", ex);
         }
-        catch (Exception ex) when (ex is not CellmException)
+        catch (Exception ex) when (ex is not CellmModelException)
         {
             // Handle any other unexpected exceptions
-            throw new CellmException($"An unexpected error occurred: {ex.Message}", ex);
+            throw new CellmModelException($"An unexpected error occurred: {ex.Message}", ex);
         }
     }
 }
diff --git a/src/Cellm/Models/Exceptions/CellmModelException.cs b/src/Cellm/Models/Exceptions/CellmModelException.cs
new file mode 100644
index 0000000..4721532
--- /dev/null
+++ b/src/Cellm/Models/Exceptions/CellmModelException.cs
@@ -0,0 +1,10 @@
+namespace Cellm.Models.Exceptions;
+
+public class CellmModelException : Exception
+{
+    public CellmModelException(string message = "#CELLM_ERROR?")
+        : base(message) { }
+
+    public CellmModelException(string message, Exception inner)
+        : base(message, inner) { }
+}
diff --git a/src/Cellm/Models/Llamafile/LlamafileResponse.cs b/src/Cellm/Models/Llamafile/LlamafileResponse.cs
deleted file mode 100644
index 202008f..0000000
--- a/src/Cellm/Models/Llamafile/LlamafileResponse.cs
+++ /dev/null
@@ -1,5 +0,0 @@
-using Cellm.Prompts;
-
-namespace Cellm.Models.Llamafile;
-
-internal record LlamafileResponse(Prompt Prompt) : IModelResponse;
\ No newline at end of file
diff --git a/src/Cellm/Models/Local/LocalUtilities.cs b/src/Cellm/Models/Local/LocalUtilities.cs
deleted file mode 100644
index 995141a..0000000
--- a/src/Cellm/Models/Local/LocalUtilities.cs
+++ /dev/null
@@ -1,136 +0,0 @@
-using System.Diagnostics;
-using System.IO.Compression;
-using System.Net.NetworkInformation;
-using Cellm.AddIn.Exceptions;
-
-namespace Cellm.Models.Local;
-
-internal class LocalUtilities(HttpClient httpClient)
-{
-    public async Task<string> DownloadFileIfNotExists(Uri uri, string filePath)
-    {
-        if (File.Exists(filePath))
-        {
-            return filePath;
-        }
-
-        var filePathPart = $"{filePath}.part";
-
-        if (File.Exists(filePathPart))
-        {
-            File.Delete(filePathPart);
-        }
-
-        var response = await httpClient.GetAsync(uri, HttpCompletionOption.ResponseHeadersRead);
-        response.EnsureSuccessStatusCode();
-
-        using (var fileStream = File.Create(filePathPart))
-        using (var httpStream = await response.Content.ReadAsStreamAsync())
-        {
-
-            await httpStream.CopyToAsync(fileStream);
-        }
-
-        File.Move(filePathPart, filePath);
-
-        return filePath;
-    }
-
-    public async Task WaitForServer(Uri endpoint, Process process, int timeOutInSeconds = 30)
-    {
-        var startTime = DateTime.UtcNow;
-
-        // Wait max 30 seconds to load model
-        while ((DateTime.UtcNow - startTime).TotalSeconds < timeOutInSeconds)
-        {
-            if (process.HasExited)
-            {
-                throw new CellmException($"Server not responding: {endpoint}");
-            }
-
-            try
-            {
-                var cancellationTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(1));
-                var response = await httpClient.GetAsync(endpoint, cancellationTokenSource.Token);
-
-                if (response.StatusCode == System.Net.HttpStatusCode.OK)
-                {
-                    // Server is ready
-                    return;
-                }
-            }
-            catch (HttpRequestException)
-            {
-            }
-            catch (TaskCanceledException)
-            {
-            }
-
-            // Wait before next attempt
-            await Task.Delay(100);
-        }
-
-        process.Kill();
-
-        throw new CellmException("Failed to run Llamafile, timeout waiting for Llamafile server to start");
-    }
-
-    public string CreateCellmDirectory(params string[] subFolders)
-    {
-        var folderPath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), nameof(Cellm));
-
-        if (subFolders.Length > 0)
-        {
-            folderPath = Path.Combine(subFolders.Prepend(folderPath).ToArray());
-        }
-
-        Directory.CreateDirectory(folderPath);
-        return folderPath;
-    }
-
-    public string CreateCellmFilePath(string fileName, params string[] subFolders)
-    {
-        return Path.Combine(CreateCellmDirectory(subFolders), fileName);
-    }
-
-    public int FindPort(ushort min = 49152, ushort max = 65535)
-    {
-        if (max < min)
-        {
-            throw new ArgumentException("Max port must be larger than min port.");
-        }
-
-        var ipProperties = IPGlobalProperties.GetIPGlobalProperties();
-
-        var activePorts = ipProperties.GetActiveTcpConnections()
-            .Where(connection => connection.State != TcpState.Closed)
-            .Select(connection => connection.LocalEndPoint)
-            .Concat(ipProperties.GetActiveTcpListeners())
-            .Concat(ipProperties.GetActiveUdpListeners())
-            .Select(endpoint => endpoint.Port)
-            .ToArray();
-
-        var firstInactivePort = Enumerable.Range(min, max)
-            .Where(port => !activePorts.Contains(port))
-            .FirstOrDefault();
-
-        if (firstInactivePort == default)
-        {
-            throw new CellmException($"All local TCP ports between {min} and {max} are currently in use.");
-        }
-
-        return firstInactivePort;
-    }
-
-    public string ExtractZipFileIfNotExtracted(string zipFilePath, string targetDirectory)
-    {
-        if (Directory.Exists(targetDirectory))
-        {
-            return targetDirectory;
-        }
-
-        ZipFile.ExtractToDirectory(zipFilePath, targetDirectory, true);
-
-        return targetDirectory;
-    }
-}
diff --git a/src/Cellm/Models/ModelRequestBehavior/ToolBehavior.cs b/src/Cellm/Models/ModelRequestBehavior/ToolBehavior.cs
deleted file mode 100644
index d38b931..0000000
--- a/src/Cellm/Models/ModelRequestBehavior/ToolBehavior.cs
+++ /dev/null
@@ -1,27 +0,0 @@
-using Cellm.Services.Configuration;
-using Cellm.Tools;
-using MediatR;
-using Microsoft.Extensions.AI;
-using Microsoft.Extensions.Options;
-
-namespace Cellm.Models.ModelRequestBehavior;
-
-internal class ToolBehavior<TRequest, TResponse>(IOptions<CellmConfiguration> cellmConfiguration, Functions functions)
-    : IPipelineBehavior<TRequest, TResponse> where TRequest : IModelRequest<TResponse>
-{
-    private readonly CellmConfiguration _cellmConfiguration = cellmConfiguration.Value;
-    private readonly List<AITool> _tools = [
-        AIFunctionFactory.Create(functions.GlobRequest),
-        AIFunctionFactory.Create(functions.FileReaderRequest)
-    ];
-
-    public async Task<TResponse> Handle(TRequest request, RequestHandlerDelegate<TResponse> next, CancellationToken cancellationToken)
-    {
-        if (_cellmConfiguration.EnableTools)
-        {
-            request.Prompt.Options.Tools = _tools;
-        }
-
-        return await next();
-    }
-}
diff --git a/src/Cellm/Models/Ollama/OllamaResponse.cs b/src/Cellm/Models/Ollama/OllamaResponse.cs
deleted file mode 100644
index 475d7f9..0000000
--- a/src/Cellm/Models/Ollama/OllamaResponse.cs
+++ /dev/null
@@ -1,5 +0,0 @@
-using Cellm.Prompts;
-
-namespace Cellm.Models.Ollama;
-
-internal record OllamaResponse(Prompt Prompt) : IModelResponse;
\ No newline at end of file
diff --git a/src/Cellm/Models/Ollama/ServiceCollectionExtensions.cs b/src/Cellm/Models/Ollama/ServiceCollectionExtensions.cs
deleted file mode 100644
index f89556e..0000000
--- a/src/Cellm/Models/Ollama/ServiceCollectionExtensions.cs
+++ /dev/null
@@ -1,38 +0,0 @@
-using Cellm.Services.Configuration;
-using Microsoft.Extensions.AI;
-using Microsoft.Extensions.Configuration;
-using Microsoft.Extensions.DependencyInjection;
-
-namespace Cellm.Models.Ollama;
-
-internal static class ServiceCollectionExtensions
-{
-    public static IServiceCollection AddOpenOllamaChatClient(this IServiceCollection services, IConfiguration configuration)
-    {
-        var resiliencePipelineConfigurator = new ResiliencePipelineConfigurator(configuration);
-
-        var ollamaConfiguration = configuration.GetRequiredSection(nameof(OllamaConfiguration)).Get<OllamaConfiguration>()
-            ?? throw new NullReferenceException(nameof(OllamaConfiguration));
-
-        services
-            .AddHttpClient(nameof(Providers.Ollama), ollamaHttpClient =>
-            {
-                ollamaHttpClient.BaseAddress = ollamaConfiguration.BaseAddress;
-                ollamaHttpClient.Timeout = TimeSpan.FromHours(1);
-            })
-            .AddResilienceHandler(
-                $"{nameof(OllamaRequestHandler)}ResiliencePipeline",
-                resiliencePipelineConfigurator.ConfigureResiliencePipeline);
-
-        services
-            .AddKeyedChatClient(Providers.Ollama, serviceProvider => new OllamaChatClient(
-                ollamaConfiguration.BaseAddress,
-                ollamaConfiguration.DefaultModel,
-                serviceProvider
-                    .GetRequiredService<IHttpClientFactory>()
-                    .CreateClient(nameof(Providers.Ollama))))
-            .UseFunctionInvocation();
-
-        return services;
-    }
-}
diff --git a/src/Cellm/Models/OpenAi/OpenAiResponse.cs b/src/Cellm/Models/OpenAi/OpenAiResponse.cs
deleted file mode 100644
index 22ade00..0000000
--- a/src/Cellm/Models/OpenAi/OpenAiResponse.cs
+++ /dev/null
@@ -1,5 +0,0 @@
-using Cellm.Prompts;
-
-namespace Cellm.Models.OpenAi;
-
-internal record OpenAiResponse(Prompt Prompt) : IModelResponse;
diff --git a/src/Cellm/Models/OpenAi/ServiceCollectionExtensions.cs b/src/Cellm/Models/OpenAi/ServiceCollectionExtensions.cs
deleted file mode 100644
index 2ad5af9..0000000
--- a/src/Cellm/Models/OpenAi/ServiceCollectionExtensions.cs
+++ /dev/null
@@ -1,21 +0,0 @@
-using Microsoft.Extensions.AI;
-using Microsoft.Extensions.Configuration;
-using Microsoft.Extensions.DependencyInjection;
-using OpenAI;
-
-namespace Cellm.Models.OpenAi;
-
-internal static class ServiceCollectionExtensions
-{
-    public static IServiceCollection AddOpenAiChatClient(this IServiceCollection services, IConfiguration configuration)
-    {
-        var openAiConfiguration = configuration.GetRequiredSection(nameof(OpenAiConfiguration)).Get<OpenAiConfiguration>()
-            ?? throw new NullReferenceException(nameof(OpenAiConfiguration));
-
-        services
-            .AddKeyedChatClient(Providers.OpenAi, new OpenAIClient(openAiConfiguration.ApiKey).AsChatClient(openAiConfiguration.DefaultModel))
-            .UseFunctionInvocation();
-
-        return services;
-    }
-}
diff --git a/src/Cellm/Models/OpenAiCompatible/OpenAiCompatibleRequest.cs b/src/Cellm/Models/OpenAiCompatible/OpenAiCompatibleRequest.cs
deleted file mode 100644
index fa13108..0000000
--- a/src/Cellm/Models/OpenAiCompatible/OpenAiCompatibleRequest.cs
+++ /dev/null
@@ -1,5 +0,0 @@
-using Cellm.Prompts;
-
-namespace Cellm.Models.OpenAiCompatible;
-
-internal record OpenAiCompatibleRequest(Prompt Prompt, Uri? BaseAddress) : IModelRequest<OpenAiCompatibleResponse>;
diff --git a/src/Cellm/Models/OpenAiCompatible/OpenAiCompatibleRequestHandler.cs b/src/Cellm/Models/OpenAiCompatible/OpenAiCompatibleRequestHandler.cs
deleted file mode 100644
index ff3575d..0000000
--- a/src/Cellm/Models/OpenAiCompatible/OpenAiCompatibleRequestHandler.cs
+++ /dev/null
@@ -1,43 +0,0 @@
-
-using System.ClientModel;
-using System.ClientModel.Primitives;
-using Cellm.Prompts;
-using Microsoft.Extensions.AI;
-using Microsoft.Extensions.Options;
-using OpenAI;
-
-namespace Cellm.Models.OpenAiCompatible;
-
-internal class OpenAiCompatibleRequestHandler(HttpClient httpClient, IOptions<OpenAiCompatibleConfiguration> openAiCompatibleConfiguration)
-    : IModelRequestHandler<OpenAiCompatibleRequest, OpenAiCompatibleResponse>
-{
-    private readonly OpenAiCompatibleConfiguration _openAiCompatibleConfiguration = openAiCompatibleConfiguration.Value;
-
-    public async Task<OpenAiCompatibleResponse> Handle(OpenAiCompatibleRequest request, CancellationToken cancellationToken)
-    {
-        var chatClient = CreateChatClient(request.BaseAddress);
-
-        var chatCompletion = await chatClient.CompleteAsync(request.Prompt.Messages, request.Prompt.Options, cancellationToken);
-
-        var prompt = new PromptBuilder(request.Prompt)
-            .AddMessage(chatCompletion.Message)
-            .Build();
-
-        return new OpenAiCompatibleResponse(prompt);
-    }
-
-    private IChatClient CreateChatClient(Uri? baseAddress)
-    {
-        var openAiClient = new OpenAIClient(
-            new ApiKeyCredential(_openAiCompatibleConfiguration.ApiKey),
-            new OpenAIClientOptions
-            {
-                Transport = new HttpClientPipelineTransport(httpClient),
-                Endpoint = baseAddress ?? _openAiCompatibleConfiguration.BaseAddress
-            });
-
-        return new ChatClientBuilder(openAiClient.AsChatClient(_openAiCompatibleConfiguration.DefaultModel))
-            .UseFunctionInvocation()
-            .Build();
-    }
-}
diff --git a/src/Cellm/Models/OpenAiCompatible/OpenAiCompatibleResponse.cs b/src/Cellm/Models/OpenAiCompatible/OpenAiCompatibleResponse.cs
deleted file mode 100644
index fe735cd..0000000
--- a/src/Cellm/Models/OpenAiCompatible/OpenAiCompatibleResponse.cs
+++ /dev/null
@@ -1,5 +0,0 @@
-using Cellm.Prompts;
-
-namespace Cellm.Models.OpenAiCompatible;
-
-internal record OpenAiCompatibleResponse(Prompt Prompt) : IModelResponse;
diff --git a/src/Cellm/Models/OpenAiCompatible/SerrviceCollectionExtensions.cs b/src/Cellm/Models/OpenAiCompatible/SerrviceCollectionExtensions.cs
deleted file mode 100644
index 40c8283..0000000
--- a/src/Cellm/Models/OpenAiCompatible/SerrviceCollectionExtensions.cs
+++ /dev/null
@@ -1,24 +0,0 @@
-using Cellm.Models.Anthropic;
-using Cellm.Services.Configuration;
-using MediatR;
-using Microsoft.Extensions.Configuration;
-using Microsoft.Extensions.DependencyInjection;
-
-namespace Cellm.Models.OpenAiCompatible;
-
-internal static class ServiceCollectionExtensions
-{
-    public static IServiceCollection AddOpenAiCompatibleChatClient(this IServiceCollection services, IConfiguration configuration)
-    {
-        var resiliencePipelineConfigurator = new ResiliencePipelineConfigurator(configuration);
-
-        services
-            .AddHttpClient<IRequestHandler<OpenAiCompatibleRequest, OpenAiCompatibleResponse>, OpenAiCompatibleRequestHandler>(openAiCompatibleHttpClient =>
-            {
-                openAiCompatibleHttpClient.Timeout = TimeSpan.FromHours(1);
-            })
-            .AddResilienceHandler(nameof(OpenAiCompatibleRequestHandler), resiliencePipelineConfigurator.ConfigureResiliencePipeline);
-
-        return services;
-    }
-}
diff --git a/src/Cellm/Prompts/Prompt.cs b/src/Cellm/Models/Prompts/Prompt.cs
similarity index 77%
rename from src/Cellm/Prompts/Prompt.cs
rename to src/Cellm/Models/Prompts/Prompt.cs
index 6c12052..9c0086b 100644
--- a/src/Cellm/Prompts/Prompt.cs
+++ b/src/Cellm/Models/Prompts/Prompt.cs
@@ -1,5 +1,5 @@
 using Microsoft.Extensions.AI;
 
-namespace Cellm.Prompts;
+namespace Cellm.Models.Prompts;
 
 public record Prompt(IList<ChatMessage> Messages, ChatOptions Options);
diff --git a/src/Cellm/Prompts/PromptBuilder.cs b/src/Cellm/Models/Prompts/PromptBuilder.cs
similarity index 92%
rename from src/Cellm/Prompts/PromptBuilder.cs
rename to src/Cellm/Models/Prompts/PromptBuilder.cs
index 77a6ebc..4e24903 100644
--- a/src/Cellm/Prompts/PromptBuilder.cs
+++ b/src/Cellm/Models/Prompts/PromptBuilder.cs
@@ -1,13 +1,11 @@
-using Cellm.AddIn.Exceptions;
-using Microsoft.Extensions.AI;
+using Microsoft.Extensions.AI;
 
-namespace Cellm.Prompts;
+namespace Cellm.Models.Prompts;
 
 public class PromptBuilder
 {
     private List<ChatMessage> _messages = new();
     private ChatOptions _options = new();
-    private List<AITool> _tools = new();
 
     public PromptBuilder()
     {
diff --git a/src/Cellm/Models/Anthropic/AnthropicConfiguration.cs b/src/Cellm/Models/Providers/Anthropic/AnthropicConfiguration.cs
similarity index 84%
rename from src/Cellm/Models/Anthropic/AnthropicConfiguration.cs
rename to src/Cellm/Models/Providers/Anthropic/AnthropicConfiguration.cs
index 0e40e12..598b6b9 100644
--- a/src/Cellm/Models/Anthropic/AnthropicConfiguration.cs
+++ b/src/Cellm/Models/Providers/Anthropic/AnthropicConfiguration.cs
@@ -1,6 +1,4 @@
-using Cellm.Services.Configuration;
-
-namespace Cellm.Models.Anthropic;
+namespace Cellm.Models.Providers.Anthropic;
 
 internal class AnthropicConfiguration : IProviderConfiguration
 {
diff --git a/src/Cellm/Models/Anthropic/AnthropicRequest.cs b/src/Cellm/Models/Providers/Anthropic/AnthropicRequest.cs
similarity index 61%
rename from src/Cellm/Models/Anthropic/AnthropicRequest.cs
rename to src/Cellm/Models/Providers/Anthropic/AnthropicRequest.cs
index f0d1f9b..c529809 100644
--- a/src/Cellm/Models/Anthropic/AnthropicRequest.cs
+++ b/src/Cellm/Models/Providers/Anthropic/AnthropicRequest.cs
@@ -1,5 +1,5 @@
-using Cellm.Prompts;
+using Cellm.Models.Prompts;
 
-namespace Cellm.Models.Anthropic;
+namespace Cellm.Models.Providers.Anthropic;
 
 internal record AnthropicRequest(Prompt Prompt, string? Provider, Uri? BaseAddress) : IModelRequest<AnthropicResponse>;
\ No newline at end of file
diff --git a/src/Cellm/Models/Anthropic/AnthropicRequestHandler.cs b/src/Cellm/Models/Providers/Anthropic/AnthropicRequestHandler.cs
similarity index 88%
rename from src/Cellm/Models/Anthropic/AnthropicRequestHandler.cs
rename to src/Cellm/Models/Providers/Anthropic/AnthropicRequestHandler.cs
index 68760d4..5530925 100644
--- a/src/Cellm/Models/Anthropic/AnthropicRequestHandler.cs
+++ b/src/Cellm/Models/Providers/Anthropic/AnthropicRequestHandler.cs
@@ -2,30 +2,28 @@
 using System.Text.Encodings.Web;
 using System.Text.Json;
 using System.Text.Json.Serialization;
-using Cellm.AddIn.Exceptions;
-using Cellm.Models.Anthropic.Models;
-using Cellm.Prompts;
-using Cellm.Services.Configuration;
+using Cellm.Models.Exceptions;
+using Cellm.Models.Prompts;
 using Microsoft.Extensions.AI;
 using Microsoft.Extensions.Options;
 
-namespace Cellm.Models.Anthropic;
+namespace Cellm.Models.Providers.Anthropic;
 
 internal class AnthropicRequestHandler : IModelRequestHandler<AnthropicRequest, AnthropicResponse>
 {
     private readonly AnthropicConfiguration _anthropicConfiguration;
-    private readonly CellmConfiguration _cellmConfiguration;
+    private readonly ProviderConfiguration _providerConfiguration;
     private readonly HttpClient _httpClient;
     private readonly Serde _serde;
 
     public AnthropicRequestHandler(
         IOptions<AnthropicConfiguration> anthropicConfiguration,
-        IOptions<CellmConfiguration> cellmConfiguration,
+        IOptions<ProviderConfiguration> providerConfiguration,
         HttpClient httpClient,
         Serde serde)
     {
         _anthropicConfiguration = anthropicConfiguration.Value;
-        _cellmConfiguration = cellmConfiguration.Value;
+        _providerConfiguration = providerConfiguration.Value;
         _httpClient = httpClient;
         _serde = serde;
     }
@@ -56,7 +54,8 @@ public string Serialize(AnthropicRequest request)
             System = request.Prompt.Messages.Where(x => x.Role == ChatRole.System).First().Text,
             Messages = request.Prompt.Messages.Where(x => x.Role != ChatRole.System).Select(x => new AnthropicMessage { Content = x.Text, Role = x.Role.ToString().ToLower() }).ToList(),
             Model = request.Prompt.Options.ModelId ?? _anthropicConfiguration.DefaultModel,
-            Temperature = request.Prompt.Options.Temperature ?? _cellmConfiguration.DefaultTemperature,
+            Temperature = request.Prompt.Options.Temperature ?? _providerConfiguration.DefaultTemperature,
+            MaxTokens = 2048
         };
 
         return _serde.Serialize(requestBody, new JsonSerializerOptions
@@ -76,7 +75,7 @@ public AnthropicResponse Deserialize(AnthropicRequest request, string response)
             DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
         });
 
-        var assistantMessage = responseBody?.Content?.Last()?.Text ?? throw new CellmException("#EMPTY_RESPONSE?");
+        var assistantMessage = responseBody?.Content?.Last()?.Text ?? throw new CellmModelException("#EMPTY_RESPONSE?");
 
         var prompt = new PromptBuilder(request.Prompt)
             .AddAssistantMessage(assistantMessage)
diff --git a/src/Cellm/Models/Providers/Anthropic/AnthropicResponse.cs b/src/Cellm/Models/Providers/Anthropic/AnthropicResponse.cs
new file mode 100644
index 0000000..fea19c8
--- /dev/null
+++ b/src/Cellm/Models/Providers/Anthropic/AnthropicResponse.cs
@@ -0,0 +1,5 @@
+using Cellm.Models.Prompts;
+
+namespace Cellm.Models.Providers.Anthropic;
+
+internal record AnthropicResponse(Prompt Prompt) : IModelResponse;
\ No newline at end of file
diff --git a/src/Cellm/Models/Anthropic/Models.cs b/src/Cellm/Models/Providers/Anthropic/Models.cs
similarity index 96%
rename from src/Cellm/Models/Anthropic/Models.cs
rename to src/Cellm/Models/Providers/Anthropic/Models.cs
index 8d83591..0d333ee 100644
--- a/src/Cellm/Models/Anthropic/Models.cs
+++ b/src/Cellm/Models/Providers/Anthropic/Models.cs
@@ -1,6 +1,6 @@
 using System.Text.Json.Serialization;
 
-namespace Cellm.Models.Anthropic.Models;
+namespace Cellm.Models.Providers.Anthropic;
 
 public class AnthropicResponseBody
 {
diff --git a/src/Cellm/Models/IModelRequest.cs b/src/Cellm/Models/Providers/IModelRequest.cs
similarity index 82%
rename from src/Cellm/Models/IModelRequest.cs
rename to src/Cellm/Models/Providers/IModelRequest.cs
index 81c18be..8aeb7b3 100644
--- a/src/Cellm/Models/IModelRequest.cs
+++ b/src/Cellm/Models/Providers/IModelRequest.cs
@@ -1,4 +1,4 @@
-using Cellm.Prompts;
+using Cellm.Models.Prompts;
 using MediatR;
 
 namespace Cellm.Models;
diff --git a/src/Cellm/Models/IModelRequestHandler.cs b/src/Cellm/Models/Providers/IModelRequestHandler.cs
similarity index 100%
rename from src/Cellm/Models/IModelRequestHandler.cs
rename to src/Cellm/Models/Providers/IModelRequestHandler.cs
diff --git a/src/Cellm/Models/IModelResponse.cs b/src/Cellm/Models/Providers/IModelResponse.cs
similarity index 73%
rename from src/Cellm/Models/IModelResponse.cs
rename to src/Cellm/Models/Providers/IModelResponse.cs
index 1338c7c..d194ca9 100644
--- a/src/Cellm/Models/IModelResponse.cs
+++ b/src/Cellm/Models/Providers/IModelResponse.cs
@@ -1,4 +1,4 @@
-using Cellm.Prompts;
+using Cellm.Models.Prompts;
 
 namespace Cellm.Models;
diff --git a/src/Cellm/Models/Providers/IProviderConfiguration.cs b/src/Cellm/Models/Providers/IProviderConfiguration.cs
new file mode 100644
index 0000000..3a0b343
--- /dev/null
+++ b/src/Cellm/Models/Providers/IProviderConfiguration.cs
@@ -0,0 +1,6 @@
+namespace Cellm.Models.Providers;
+
+public interface IProviderConfiguration
+{
+    string DefaultModel { get; init; }
+}
diff --git a/src/Cellm/Models/IProviderRequest.cs b/src/Cellm/Models/Providers/IProviderRequest.cs
similarity index 100%
rename from src/Cellm/Models/IProviderRequest.cs
rename to src/Cellm/Models/Providers/IProviderRequest.cs
diff --git a/src/Cellm/Models/IProviderRequestHandler.cs b/src/Cellm/Models/Providers/IProviderRequestHandler.cs
similarity index 100%
rename from src/Cellm/Models/IProviderRequestHandler.cs
rename to src/Cellm/Models/Providers/IProviderRequestHandler.cs
diff --git a/src/Cellm/Models/Llamafile/LlamafileConfiguration.cs b/src/Cellm/Models/Providers/Llamafile/LlamafileConfiguration.cs
similarity index 88%
rename from src/Cellm/Models/Llamafile/LlamafileConfiguration.cs
rename to src/Cellm/Models/Providers/Llamafile/LlamafileConfiguration.cs
index d0dc760..c604575 100644
--- a/src/Cellm/Models/Llamafile/LlamafileConfiguration.cs
+++ b/src/Cellm/Models/Providers/Llamafile/LlamafileConfiguration.cs
@@ -1,6 +1,4 @@
-using Cellm.Services.Configuration;
-
-namespace Cellm.Models.Llamafile;
+namespace Cellm.Models.Providers.Llamafile;
 
 internal class LlamafileConfiguration : IProviderConfiguration
 {
diff --git a/src/Cellm/Models/Llamafile/LlamafileRequest.cs b/src/Cellm/Models/Providers/Llamafile/LlamafileRequest.cs
similarity index 53%
rename from src/Cellm/Models/Llamafile/LlamafileRequest.cs
rename to src/Cellm/Models/Providers/Llamafile/LlamafileRequest.cs
index ae45bae..870c858 100644
--- a/src/Cellm/Models/Llamafile/LlamafileRequest.cs
+++ b/src/Cellm/Models/Providers/Llamafile/LlamafileRequest.cs
@@ -1,5 +1,5 @@
-using Cellm.Prompts;
+using Cellm.Models.Prompts;
 
-namespace Cellm.Models.Llamafile;
+namespace Cellm.Models.Providers.Llamafile;
 
 internal record LlamafileRequest(Prompt Prompt) : IProviderRequest<LlamafileResponse>;
\ No newline at end of file
diff --git a/src/Cellm/Models/Llamafile/LlamafileRequestHandler.cs b/src/Cellm/Models/Providers/Llamafile/LlamafileRequestHandler.cs
similarity index 69%
rename from src/Cellm/Models/Llamafile/LlamafileRequestHandler.cs
rename to src/Cellm/Models/Providers/Llamafile/LlamafileRequestHandler.cs
index 7788ed9..e5b5b13 100644
--- a/src/Cellm/Models/Llamafile/LlamafileRequestHandler.cs
+++ b/src/Cellm/Models/Providers/Llamafile/LlamafileRequestHandler.cs
@@ -1,13 +1,12 @@
 using System.Diagnostics;
-using Cellm.AddIn.Exceptions;
-using Cellm.Models.Local;
-using Cellm.Models.OpenAiCompatible;
-using Cellm.Services.Configuration;
+using Cellm.Models.Exceptions;
+using Cellm.Models.Local.Utilities;
+using Cellm.Models.Providers.OpenAiCompatible;
 using MediatR;
 using Microsoft.Extensions.Logging;
 using Microsoft.Extensions.Options;
 
-namespace Cellm.Models.Llamafile;
+namespace Cellm.Models.Providers.Llamafile;
 
 internal class LlamafileRequestHandler : IProviderRequestHandler<LlamafileRequest, LlamafileResponse>
 {
@@ -16,35 +15,34 @@ private record Llamafile(string ModelPath, Uri BaseAddress, Process Process);
 
     private readonly AsyncLazy<string> _llamafileExePath;
     private readonly Dictionary<string, AsyncLazy<Llamafile>> _llamafiles;
     private readonly ProcessManager _processManager;
+    private readonly FileManager _fileManager;
+    private readonly ServerManager _serverManager;
 
-    private readonly CellmConfiguration _cellmConfiguration;
     private readonly LlamafileConfiguration _llamafileConfiguration;
 
     private readonly ISender _sender;
-    private readonly HttpClient _httpClient;
-    private readonly LocalUtilities _localUtilities;
     private readonly ILogger<LlamafileRequestHandler> _logger;
 
-    public LlamafileRequestHandler(IOptions<CellmConfiguration> cellmConfiguration,
+    public LlamafileRequestHandler(
         IOptions<LlamafileConfiguration> llamafileConfiguration,
         ISender sender,
         HttpClient httpClient,
-        LocalUtilities localUtilities,
+        FileManager fileManager,
         ProcessManager processManager,
+        ServerManager serverManager,
         ILogger<LlamafileRequestHandler> logger)
     {
-        _cellmConfiguration = cellmConfiguration.Value;
         _llamafileConfiguration = llamafileConfiguration.Value;
         _sender = sender;
-        _httpClient = httpClient;
-        _localUtilities = localUtilities;
+        _fileManager = fileManager;
        _processManager = processManager;
+        _serverManager = serverManager;
         _logger = logger;
 
         _llamafileExePath = new AsyncLazy<string>(async () =>
         {
             var llamafileName = Path.GetFileName(_llamafileConfiguration.LlamafileUrl.Segments.Last());
-            return await _localUtilities.DownloadFileIfNotExists(_llamafileConfiguration.LlamafileUrl, _localUtilities.CreateCellmFilePath(CreateModelFileName($"{llamafileName}.exe"), "Llamafile"));
+            return await _fileManager.DownloadFileIfNotExists(_llamafileConfiguration.LlamafileUrl, _fileManager.CreateCellmFilePath(CreateModelFileName($"{llamafileName}.exe"), "Llamafile"));
         });
 
         _llamafiles = _llamafileConfiguration.Models.ToDictionary(x => x.Key, x => new AsyncLazy<Llamafile>(async () =>
         {
@@ -53,13 +51,13 @@ public LlamafileRequestHandler(IOptions<CellmConfiguration> cellmConfiguration,
 
             var exePath = await _llamafileExePath;
 
             // Download model
-            var modelPath = await _localUtilities.DownloadFileIfNotExists(x.Value, _localUtilities.CreateCellmFilePath(CreateModelFileName(x.Key), "Llamafile"));
+            var modelPath = await _fileManager.DownloadFileIfNotExists(x.Value, _fileManager.CreateCellmFilePath(CreateModelFileName(x.Key), "Llamafile"));
 
             // Start server
             var baseAddress = new UriBuilder(
                 _llamafileConfiguration.BaseAddress.Scheme,
                 _llamafileConfiguration.BaseAddress.Host,
-                _localUtilities.FindPort(),
+                _serverManager.FindPort(),
                 _llamafileConfiguration.BaseAddress.AbsolutePath).Uri;
 
             var process = await StartProcess(exePath, modelPath, baseAddress);
@@ -98,27 +96,24 @@ private async Task<Process> StartProcess(string exePath, string modelPath, Uri b
 
         processStartInfo.UseShellExecute = false;
         processStartInfo.CreateNoWindow = true;
-        processStartInfo.RedirectStandardError = _cellmConfiguration.Debug;
-        processStartInfo.RedirectStandardOutput = _cellmConfiguration.Debug;
+        processStartInfo.RedirectStandardError = true;
+        processStartInfo.RedirectStandardOutput = true;
 
-        var process = Process.Start(processStartInfo) ?? throw new CellmException("Failed to run Llamafile");
+        var process = Process.Start(processStartInfo) ?? throw new CellmModelException("Failed to run Llamafile");
 
-        if (_cellmConfiguration.Debug)
+        process.OutputDataReceived += (sender, e) =>
         {
-            process.OutputDataReceived += (sender, e) =>
+            if (!string.IsNullOrEmpty(e.Data))
             {
-                if (!string.IsNullOrEmpty(e.Data))
-                {
-                    _logger.LogDebug(e.Data);
-                }
-            };
-
-            process.BeginOutputReadLine();
-            process.BeginErrorReadLine();
-        }
+                _logger.LogDebug(e.Data);
+            }
+        };
+
+        process.BeginOutputReadLine();
+        process.BeginErrorReadLine();
 
         var uriBuilder = new UriBuilder(baseAddress.Scheme, baseAddress.Host, baseAddress.Port, "/health");
-        await _localUtilities.WaitForServer(uriBuilder.Uri, process);
+        await _serverManager.WaitForServer(uriBuilder.Uri, process);
 
         // Kill Llamafile when Excel exits or dies
         _processManager.AssignProcessToExcel(process);
diff --git a/src/Cellm/Models/Providers/Llamafile/LlamafileResponse.cs b/src/Cellm/Models/Providers/Llamafile/LlamafileResponse.cs
new file mode 100644
index 0000000..4a320b3
--- /dev/null
+++ b/src/Cellm/Models/Providers/Llamafile/LlamafileResponse.cs
@@ -0,0 +1,5 @@
+using Cellm.Models.Prompts;
+
+namespace Cellm.Models.Providers.Llamafile;
+
+internal record LlamafileResponse(Prompt Prompt) : IModelResponse;
\ No newline at end of file
diff --git a/src/Cellm/Models/Ollama/OllamaConfiguration.cs b/src/Cellm/Models/Providers/Ollama/OllamaConfiguration.cs
similarity index 80%
rename from src/Cellm/Models/Ollama/OllamaConfiguration.cs
rename to src/Cellm/Models/Providers/Ollama/OllamaConfiguration.cs
index d234a47..ac9e72a 100644
--- a/src/Cellm/Models/Ollama/OllamaConfiguration.cs
+++ b/src/Cellm/Models/Providers/Ollama/OllamaConfiguration.cs
@@ -1,6 +1,4 @@
-using Cellm.Services.Configuration;
-
-namespace Cellm.Models.Ollama;
+namespace Cellm.Models.Providers.Ollama;
 
 internal class OllamaConfiguration : IProviderConfiguration
 {
diff --git a/src/Cellm/Models/Ollama/OllamaRequest.cs b/src/Cellm/Models/Providers/Ollama/OllamaRequest.cs
similarity index 52%
rename from src/Cellm/Models/Ollama/OllamaRequest.cs
rename to src/Cellm/Models/Providers/Ollama/OllamaRequest.cs
index eeba25e..d4ba9f0 100644
--- a/src/Cellm/Models/Ollama/OllamaRequest.cs
+++ b/src/Cellm/Models/Providers/Ollama/OllamaRequest.cs
@@ -1,5 +1,5 @@
-using Cellm.Prompts;
+using Cellm.Models.Prompts;
 
-namespace Cellm.Models.Ollama;
+namespace Cellm.Models.Providers.Ollama;
 
 internal record OllamaRequest(Prompt Prompt) : IModelRequest<OllamaResponse>;
diff --git a/src/Cellm/Models/Ollama/OllamaRequestHandler.cs b/src/Cellm/Models/Providers/Ollama/OllamaRequestHandler.cs
similarity index 73%
rename from src/Cellm/Models/Ollama/OllamaRequestHandler.cs
rename to src/Cellm/Models/Providers/Ollama/OllamaRequestHandler.cs
index 2e5df86..19ca048 100644
--- a/src/Cellm/Models/Ollama/OllamaRequestHandler.cs
+++ b/src/Cellm/Models/Providers/Ollama/OllamaRequestHandler.cs
@@ -2,16 +2,15 @@
 using System.Net.Http.Json;
 using System.Text;
 using System.Text.Json;
-using Cellm.AddIn.Exceptions;
-using Cellm.Models.Local;
-using Cellm.Prompts;
-using Cellm.Services.Configuration;
+using Cellm.Models.Exceptions;
+using Cellm.Models.Local.Utilities;
+using Cellm.Models.Prompts;
 using Microsoft.Extensions.AI;
 using Microsoft.Extensions.DependencyInjection;
 using Microsoft.Extensions.Logging;
 using Microsoft.Extensions.Options;
 
-namespace Cellm.Models.Ollama;
+namespace Cellm.Models.Providers.Ollama;
 
 internal class OllamaRequestHandler : IModelRequestHandler<OllamaRequest, OllamaResponse>
 {
@@ -22,45 +21,45 @@ record Model(string Name);
     record Progress(string Status);
 
     private readonly IChatClient _chatClient;
-    private readonly CellmConfiguration _cellmConfiguration;
     private readonly OllamaConfiguration _ollamaConfiguration;
     private readonly HttpClient _httpClient;
-    private readonly LocalUtilities _localUtilities;
+    private readonly FileManager _fileManager;
     private readonly ProcessManager _processManager;
+    private readonly ServerManager _serverManager;
     private readonly ILogger<OllamaRequestHandler> _logger;
 
     private readonly AsyncLazy<string> _ollamaExePath;
     private readonly AsyncLazy<Uri> _ollamaServer;
 
     public OllamaRequestHandler(
-        [FromKeyedServices(Providers.Ollama)] IChatClient chatClient,
+        [FromKeyedServices(Provider.Ollama)] IChatClient chatClient,
         IHttpClientFactory httpClientFactory,
-        IOptions<CellmConfiguration> cellmConfiguration,
         IOptions<OllamaConfiguration> ollamaConfiguration,
-        LocalUtilities localUtilities,
+        FileManager fileManager,
         ProcessManager processManager,
+        ServerManager serverManager,
         ILogger<OllamaRequestHandler> logger)
     {
         _chatClient = chatClient;
-        _httpClient = httpClientFactory.CreateClient(nameof(Providers.Ollama));
-        _cellmConfiguration = cellmConfiguration.Value;
+        _httpClient = httpClientFactory.CreateClient(nameof(Provider.Ollama));
         _ollamaConfiguration = ollamaConfiguration.Value;
-        _localUtilities = localUtilities;
+        _fileManager = fileManager;
         _processManager = processManager;
+        _serverManager = serverManager;
         _logger = logger;
 
         _ollamaExePath = new AsyncLazy<string>(async () =>
        {
             var zipFileName = string.Join("-", _ollamaConfiguration.ZipUrl.Segments.Select(x => x.Replace("/", string.Empty)).TakeLast(2));
-            var zipFilePath = _localUtilities.CreateCellmFilePath(zipFileName);
+            var zipFilePath = _fileManager.CreateCellmFilePath(zipFileName);
 
-            await _localUtilities.DownloadFileIfNotExists(
+            await _fileManager.DownloadFileIfNotExists(
                 _ollamaConfiguration.ZipUrl,
                 zipFilePath);
 
-            var ollamaPath = _localUtilities.ExtractZipFileIfNotExtracted(
+            var ollamaPath = _fileManager.ExtractZipFileIfNotExtracted(
                 zipFilePath,
-                _localUtilities.CreateCellmDirectory(nameof(Ollama), Path.GetFileNameWithoutExtension(zipFileName)));
+                _fileManager.CreateCellmDirectory(nameof(Ollama), Path.GetFileNameWithoutExtension(zipFileName)));
 
             return Path.Combine(ollamaPath, "ollama.exe");
         });
@@ -114,7 +113,7 @@ private async Task<bool> ServerIsRunning(Uri baseAddress)
 
     private async Task<bool> ModelIsDownloaded(Uri baseAddress, string modelId)
     {
-        var tags = await _httpClient.GetFromJsonAsync<Tags>("api/tags") ?? throw new CellmException();
+        var tags = await _httpClient.GetFromJsonAsync<Tags>("api/tags") ?? throw new CellmModelException();
 
         return tags.Models.Select(x => x.Name).Contains(modelId);
     }
@@ -133,12 +132,12 @@ private async Task DownloadModel(Uri baseAddress, string modelId)
 
             if (progress is null || progress.Last().Status != "success")
             {
-                throw new CellmException($"Ollama failed to download model {modelId}");
+                throw new CellmModelException($"Ollama failed to download model {modelId}");
             }
         }
         catch (HttpRequestException ex)
         {
-            throw new CellmException($"Ollama failed to download model {modelId} or {modelId} does not exist", ex);
+            throw new CellmModelException($"Ollama failed to download model {modelId} or {modelId} does not exist", ex);
         }
     }
@@ -151,28 +150,25 @@ private async Task<Process> StartProcess(string ollamaExePath, Uri baseAddress)
 
         processStartInfo.UseShellExecute = false;
         processStartInfo.CreateNoWindow = true;
-        processStartInfo.RedirectStandardError = _cellmConfiguration.Debug;
-        processStartInfo.RedirectStandardOutput = _cellmConfiguration.Debug;
+        processStartInfo.RedirectStandardError = true;
+        processStartInfo.RedirectStandardOutput = true;
 
-        var process = Process.Start(processStartInfo) ?? throw new CellmException("Failed to run Ollama");
+        var process = Process.Start(processStartInfo) ?? throw new CellmModelException("Failed to run Ollama");
 
-        if (_cellmConfiguration.Debug)
+        process.OutputDataReceived += (sender, e) =>
         {
-            process.OutputDataReceived += (sender, e) =>
+            if (!string.IsNullOrEmpty(e.Data))
             {
-                if (!string.IsNullOrEmpty(e.Data))
-                {
-                    _logger.LogDebug(e.Data);
-                    Debug.WriteLine(e.Data);
-                }
-            };
-
-            process.BeginOutputReadLine();
-            process.BeginErrorReadLine();
-        }
+                _logger.LogDebug(e.Data);
+                Debug.WriteLine(e.Data);
+            }
+        };
+
+        process.BeginOutputReadLine();
+        process.BeginErrorReadLine();
 
         var address = new Uri(baseAddress, "/v1/models");
-        await _localUtilities.WaitForServer(address, process);
+        await _serverManager.WaitForServer(address, process);
 
         // Kill Ollama when Excel exits or dies
         _processManager.AssignProcessToExcel(process);
diff --git a/src/Cellm/Models/Providers/Ollama/OllamaResponse.cs b/src/Cellm/Models/Providers/Ollama/OllamaResponse.cs
new file mode 100644
index 0000000..045bc00
--- /dev/null
+++ b/src/Cellm/Models/Providers/Ollama/OllamaResponse.cs
@@ -0,0 +1,5 @@
+using Cellm.Models.Prompts;
+
+namespace Cellm.Models.Providers.Ollama;
+
+internal record OllamaResponse(Prompt Prompt) : IModelResponse;
\ No newline at end of file
diff --git a/src/Cellm/Models/OpenAi/OpenAiConfiguration.cs b/src/Cellm/Models/Providers/OpenAi/OpenAiConfiguration.cs
similarity index 54%
rename from src/Cellm/Models/OpenAi/OpenAiConfiguration.cs
rename to src/Cellm/Models/Providers/OpenAi/OpenAiConfiguration.cs
index e963d98..3bd69d8 100644
--- a/src/Cellm/Models/OpenAi/OpenAiConfiguration.cs
+++ b/src/Cellm/Models/Providers/OpenAi/OpenAiConfiguration.cs
@@ -1,6 +1,6 @@
-namespace Cellm.Models.OpenAi;
+namespace Cellm.Models.Providers.OpenAi;
 
-internal class OpenAiConfiguration
+internal class OpenAiConfiguration : IProviderConfiguration
 {
     public string DefaultModel { get; init; } = string.Empty;
 
diff --git a/src/Cellm/Models/OpenAi/OpenAiRequest.cs b/src/Cellm/Models/Providers/OpenAi/OpenAiRequest.cs
similarity index 52%
rename from src/Cellm/Models/OpenAi/OpenAiRequest.cs
src/Cellm/Models/OpenAi/OpenAiRequest.cs rename to src/Cellm/Models/Providers/OpenAi/OpenAiRequest.cs index 88e314d..51e92b7 100644 --- a/src/Cellm/Models/OpenAi/OpenAiRequest.cs +++ b/src/Cellm/Models/Providers/OpenAi/OpenAiRequest.cs @@ -1,5 +1,5 @@ -using Cellm.Prompts; +using Cellm.Models.Prompts; -namespace Cellm.Models.OpenAi; +namespace Cellm.Models.Providers.OpenAi; internal record OpenAiRequest(Prompt Prompt) : IModelRequest; diff --git a/src/Cellm/Models/OpenAi/OpenAiRequestHandler.cs b/src/Cellm/Models/Providers/OpenAi/OpenAiRequestHandler.cs similarity index 69% rename from src/Cellm/Models/OpenAi/OpenAiRequestHandler.cs rename to src/Cellm/Models/Providers/OpenAi/OpenAiRequestHandler.cs index 76c6c8d..519a5ac 100644 --- a/src/Cellm/Models/OpenAi/OpenAiRequestHandler.cs +++ b/src/Cellm/Models/Providers/OpenAi/OpenAiRequestHandler.cs @@ -1,10 +1,10 @@ -using Cellm.Prompts; +using Cellm.Models.Prompts; using Microsoft.Extensions.AI; using Microsoft.Extensions.DependencyInjection; -namespace Cellm.Models.OpenAi; +namespace Cellm.Models.Providers.OpenAi; -internal class OpenAiRequestHandler([FromKeyedServices(Providers.OpenAi)] IChatClient chatClient) : IModelRequestHandler +internal class OpenAiRequestHandler([FromKeyedServices(Provider.OpenAi)] IChatClient chatClient) : IModelRequestHandler { public async Task Handle(OpenAiRequest request, CancellationToken cancellationToken) diff --git a/src/Cellm/Models/Providers/OpenAi/OpenAiResponse.cs b/src/Cellm/Models/Providers/OpenAi/OpenAiResponse.cs new file mode 100644 index 0000000..70b5871 --- /dev/null +++ b/src/Cellm/Models/Providers/OpenAi/OpenAiResponse.cs @@ -0,0 +1,5 @@ +using Cellm.Models.Prompts; + +namespace Cellm.Models.Providers.OpenAi; + +internal record OpenAiResponse(Prompt Prompt) : IModelResponse; diff --git a/src/Cellm/Models/Providers/OpenAiCompatible/OpenAiCompatibleChatClientFactory.cs b/src/Cellm/Models/Providers/OpenAiCompatible/OpenAiCompatibleChatClientFactory.cs new file mode 100644 index 0000000..198544b --- /dev/null +++ b/src/Cellm/Models/Providers/OpenAiCompatible/OpenAiCompatibleChatClientFactory.cs @@ -0,0 +1,24 @@ +using System.ClientModel; +using System.ClientModel.Primitives; +using Microsoft.Extensions.AI; +using OpenAI; + +namespace Cellm.Models.Providers.OpenAiCompatible; + +internal class OpenAiCompatibleChatClientFactory(HttpClient httpClient) +{ + public IChatClient Create(Uri BaseAddress, string modelId, string apiKey) + { + var openAiClient = new OpenAIClient( + new ApiKeyCredential(apiKey), + new OpenAIClientOptions + { + Transport = new HttpClientPipelineTransport(httpClient), + Endpoint = BaseAddress + }); + + return new ChatClientBuilder(openAiClient.AsChatClient(modelId)) + .UseFunctionInvocation() + .Build(); + } +} diff --git a/src/Cellm/Models/OpenAiCompatible/OpenAiCompatibleConfiguration.cs b/src/Cellm/Models/Providers/OpenAiCompatible/OpenAiCompatibleConfiguration.cs similarity index 80% rename from src/Cellm/Models/OpenAiCompatible/OpenAiCompatibleConfiguration.cs rename to src/Cellm/Models/Providers/OpenAiCompatible/OpenAiCompatibleConfiguration.cs index 53fffbe..2c7a83c 100644 --- a/src/Cellm/Models/OpenAiCompatible/OpenAiCompatibleConfiguration.cs +++ b/src/Cellm/Models/Providers/OpenAiCompatible/OpenAiCompatibleConfiguration.cs @@ -1,4 +1,4 @@ -namespace Cellm.Models.OpenAiCompatible; +namespace Cellm.Models.Providers.OpenAiCompatible; internal class OpenAiCompatibleConfiguration { diff --git a/src/Cellm/Models/Providers/OpenAiCompatible/OpenAiCompatibleRequest.cs 
b/src/Cellm/Models/Providers/OpenAiCompatible/OpenAiCompatibleRequest.cs new file mode 100644 index 0000000..611f780 --- /dev/null +++ b/src/Cellm/Models/Providers/OpenAiCompatible/OpenAiCompatibleRequest.cs @@ -0,0 +1,8 @@ +using Cellm.Models.Prompts; + +namespace Cellm.Models.Providers.OpenAiCompatible; + +internal record OpenAiCompatibleRequest( + Prompt Prompt, + Uri? BaseAddress = null, + string? ApiKey = null) : IModelRequest; diff --git a/src/Cellm/Models/Providers/OpenAiCompatible/OpenAiCompatibleRequestHandler.cs b/src/Cellm/Models/Providers/OpenAiCompatible/OpenAiCompatibleRequestHandler.cs new file mode 100644 index 0000000..91bd328 --- /dev/null +++ b/src/Cellm/Models/Providers/OpenAiCompatible/OpenAiCompatibleRequestHandler.cs @@ -0,0 +1,28 @@ +using Cellm.Models.Prompts; +using Microsoft.Extensions.Options; + +namespace Cellm.Models.Providers.OpenAiCompatible; + +internal class OpenAiCompatibleRequestHandler( + OpenAiCompatibleChatClientFactory openAiCompatibleChatClientFactory, + IOptions openAiCompatibleConfiguration) + : IModelRequestHandler +{ + private readonly OpenAiCompatibleConfiguration _openAiCompatibleConfiguration = openAiCompatibleConfiguration.Value; + + public async Task Handle(OpenAiCompatibleRequest request, CancellationToken cancellationToken) + { + var chatClient = openAiCompatibleChatClientFactory.Create( + request.BaseAddress ?? _openAiCompatibleConfiguration.BaseAddress, + request.Prompt.Options.ModelId ?? _openAiCompatibleConfiguration.DefaultModel, + request.ApiKey ?? _openAiCompatibleConfiguration.ApiKey); + + var chatCompletion = await chatClient.CompleteAsync(request.Prompt.Messages, request.Prompt.Options, cancellationToken); + + var prompt = new PromptBuilder(request.Prompt) + .AddMessage(chatCompletion.Message) + .Build(); + + return new OpenAiCompatibleResponse(prompt); + } +} diff --git a/src/Cellm/Models/Providers/OpenAiCompatible/OpenAiCompatibleResponse.cs b/src/Cellm/Models/Providers/OpenAiCompatible/OpenAiCompatibleResponse.cs new file mode 100644 index 0000000..c4620d0 --- /dev/null +++ b/src/Cellm/Models/Providers/OpenAiCompatible/OpenAiCompatibleResponse.cs @@ -0,0 +1,5 @@ +using Cellm.Models.Prompts; + +namespace Cellm.Models.Providers.OpenAiCompatible; + +internal record OpenAiCompatibleResponse(Prompt Prompt) : IModelResponse; diff --git a/src/Cellm/Models/Providers.cs b/src/Cellm/Models/Providers/Provider.cs similarity index 57% rename from src/Cellm/Models/Providers.cs rename to src/Cellm/Models/Providers/Provider.cs index f9a590b..04c4840 100644 --- a/src/Cellm/Models/Providers.cs +++ b/src/Cellm/Models/Providers/Provider.cs @@ -1,6 +1,6 @@ -namespace Cellm.Models; +namespace Cellm.Models.Providers; -public enum Providers +public enum Provider { Anthropic, Llamafile, diff --git a/src/Cellm/Models/Providers/ProviderConfiguration.cs b/src/Cellm/Models/Providers/ProviderConfiguration.cs new file mode 100644 index 0000000..9e1ea59 --- /dev/null +++ b/src/Cellm/Models/Providers/ProviderConfiguration.cs @@ -0,0 +1,18 @@ +namespace Cellm.Models.Providers; + +public class ProviderConfiguration : IProviderConfiguration +{ + public string DefaultProvider { get; init; } = string.Empty; + + public string DefaultModel { get; init; } = string.Empty; + + public double DefaultTemperature { get; init; } + + public bool EnableTools { get; init; } = true; + + public bool EnableCache { get; init; } = true; + + public int CacheTimeoutInSeconds { get; init; } = 3600; + + public int HttpTimeoutInSeconds { get; init; } = 600; +} diff --git 
diff --git a/src/Cellm/Models/Llamafile/AsyncLazy.cs b/src/Cellm/Models/Providers/Utilities/AsyncLazy.cs
similarity index 97%
rename from src/Cellm/Models/Llamafile/AsyncLazy.cs
rename to src/Cellm/Models/Providers/Utilities/AsyncLazy.cs
index acdb271..26ea70e 100644
--- a/src/Cellm/Models/Llamafile/AsyncLazy.cs
+++ b/src/Cellm/Models/Providers/Utilities/AsyncLazy.cs
@@ -1,5 +1,7 @@
 using System.Runtime.CompilerServices;
 
+namespace Cellm.Models.Local.Utilities;
+
 /// <summary>
 /// Provides threadsafe asynchronous lazy initialization. This type is fully threadsafe.
 /// </summary>
diff --git a/src/Cellm/Models/Providers/Utilities/FileManager.cs b/src/Cellm/Models/Providers/Utilities/FileManager.cs
new file mode 100644
index 0000000..f10a397
--- /dev/null
+++ b/src/Cellm/Models/Providers/Utilities/FileManager.cs
@@ -0,0 +1,65 @@
+using System.IO.Compression;
+
+namespace Cellm.Models.Local.Utilities;
+
+internal class FileManager(HttpClient httpClient)
+{
+    public async Task<string> DownloadFileIfNotExists(Uri uri, string filePath)
+    {
+        if (File.Exists(filePath))
+        {
+            return filePath;
+        }
+
+        var filePathPart = $"{filePath}.part";
+
+        if (File.Exists(filePathPart))
+        {
+            File.Delete(filePathPart);
+        }
+
+        var response = await httpClient.GetAsync(uri, HttpCompletionOption.ResponseHeadersRead);
+        response.EnsureSuccessStatusCode();
+
+        using (var fileStream = File.Create(filePathPart))
+        using (var httpStream = await response.Content.ReadAsStreamAsync())
+        {
+            await httpStream.CopyToAsync(fileStream);
+        }
+
+        File.Move(filePathPart, filePath);
+
+        return filePath;
+    }
+
+    public string CreateCellmDirectory(params string[] subFolders)
+    {
+        var folderPath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), nameof(Cellm));
+
+        if (subFolders.Length > 0)
+        {
+            folderPath = Path.Combine(subFolders.Prepend(folderPath).ToArray());
+        }
+
+        Directory.CreateDirectory(folderPath);
+        return folderPath;
+    }
+
+    public string CreateCellmFilePath(string fileName, params string[] subFolders)
+    {
+        return Path.Combine(CreateCellmDirectory(subFolders), fileName);
+    }
+
+    public string ExtractZipFileIfNotExtracted(string zipFilePath, string targetDirectory)
+    {
+        if (Directory.Exists(targetDirectory))
+        {
+            return targetDirectory;
+        }
+
+        ZipFile.ExtractToDirectory(zipFilePath, targetDirectory, true);
+
+        return targetDirectory;
+    }
+}
diff --git a/src/Cellm/Models/Local/ProcessManager.cs b/src/Cellm/Models/Providers/Utilities/ProcessManager.cs
similarity index 98%
rename from src/Cellm/Models/Local/ProcessManager.cs
rename to src/Cellm/Models/Providers/Utilities/ProcessManager.cs
index da2d4c9..95b51a7 100644
--- a/src/Cellm/Models/Local/ProcessManager.cs
+++ b/src/Cellm/Models/Providers/Utilities/ProcessManager.cs
@@ -1,6 +1,8 @@
 using System.Diagnostics;
 using System.Runtime.InteropServices;
 
+namespace Cellm.Models.Local.Utilities;
+
 public class ProcessManager
 {
     [DllImport("kernel32.dll", CharSet = CharSet.Unicode)]
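A sketch of how a local provider could combine these utilities to fetch a model exactly once per process. The URI and file name are invented, and this assumes AsyncLazy<T> keeps the awaitable surface of the stock implementation it was renamed from:

using Cellm.Models.Local.Utilities;

// Hypothetical one-time download; URI and file name are placeholders.
var fileManager = new FileManager(new HttpClient());

var modelPath = new AsyncLazy<string>(async () =>
    await fileManager.DownloadFileIfNotExists(
        new Uri("https://example.com/some-model.gguf"),
        fileManager.CreateCellmFilePath("some-model.gguf", "Models")));

// The first await downloads the file; every later await reuses the cached path.
var path = await modelPath;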
diff --git a/src/Cellm/Models/Providers/Utilities/ServerManager.cs b/src/Cellm/Models/Providers/Utilities/ServerManager.cs
new file mode 100644
index 0000000..271dbaf
--- /dev/null
+++ b/src/Cellm/Models/Providers/Utilities/ServerManager.cs
@@ -0,0 +1,76 @@
+using System.Diagnostics;
+using System.Net.NetworkInformation;
+using Cellm.Models.Exceptions;
+
+namespace Cellm.Models.Local.Utilities;
+
+internal class ServerManager(HttpClient httpClient)
+{
+    public async Task WaitForServer(Uri endpoint, Process process, int timeOutInSeconds = 30)
+    {
+        var startTime = DateTime.UtcNow;
+
+        // Wait up to timeOutInSeconds for the server to load the model
+        while ((DateTime.UtcNow - startTime).TotalSeconds < timeOutInSeconds)
+        {
+            if (process.HasExited)
+            {
+                throw new CellmModelException($"Process exited early: {endpoint}");
+            }
+
+            try
+            {
+                var cancellationTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(1));
+                var response = await httpClient.GetAsync(endpoint, cancellationTokenSource.Token);
+
+                if (response.StatusCode == System.Net.HttpStatusCode.OK)
+                {
+                    // Server is ready
+                    return;
+                }
+            }
+            catch (HttpRequestException)
+            {
+            }
+            catch (TaskCanceledException)
+            {
+            }
+
+            // Wait before next attempt
+            await Task.Delay(100);
+        }
+
+        process.Kill();
+
+        throw new CellmModelException($"Timed out waiting for server to start: {endpoint}");
+    }
+
+    public int FindPort(ushort min = 49152, ushort max = 65535)
+    {
+        if (max < min)
+        {
+            throw new ArgumentException("Max port must be larger than min port.");
+        }
+
+        var ipProperties = IPGlobalProperties.GetIPGlobalProperties();
+
+        var activePorts = ipProperties.GetActiveTcpConnections()
+            .Where(connection => connection.State != TcpState.Closed)
+            .Select(connection => connection.LocalEndPoint)
+            .Concat(ipProperties.GetActiveTcpListeners())
+            .Concat(ipProperties.GetActiveUdpListeners())
+            .Select(endpoint => endpoint.Port)
+            .ToArray();
+
+        // Enumerable.Range takes (start, count), not (start, end), so the
+        // count must be max - min + 1 to cover the inclusive range.
+        var firstInactivePort = Enumerable.Range(min, max - min + 1)
+            .Where(port => !activePorts.Contains(port))
+            .FirstOrDefault();
+
+        if (firstInactivePort == default)
+        {
+            throw new CellmModelException($"All local TCP ports between {min} and {max} are currently in use.");
+        }
+
+        return firstInactivePort;
+    }
+}
diff --git a/src/Cellm/Services/Configuration/CircuitBreakerConfiguration.cs b/src/Cellm/Models/Resilience/CircuitBreakerConfiguration.cs
similarity index 85%
rename from src/Cellm/Services/Configuration/CircuitBreakerConfiguration.cs
rename to src/Cellm/Models/Resilience/CircuitBreakerConfiguration.cs
index 5ae0846..f7e7fda 100644
--- a/src/Cellm/Services/Configuration/CircuitBreakerConfiguration.cs
+++ b/src/Cellm/Models/Resilience/CircuitBreakerConfiguration.cs
@@ -1,4 +1,4 @@
-namespace Cellm.Services.Configuration;
+namespace Cellm.Models.Resilience;
 
 public class CircuitBreakerConfiguration
 {
diff --git a/src/Cellm/Services/Configuration/RateLimiterConfiguration.cs b/src/Cellm/Models/Resilience/RateLimiterConfiguration.cs
similarity index 86%
rename from src/Cellm/Services/Configuration/RateLimiterConfiguration.cs
rename to src/Cellm/Models/Resilience/RateLimiterConfiguration.cs
index 402a019..3f0edd9 100644
--- a/src/Cellm/Services/Configuration/RateLimiterConfiguration.cs
+++ b/src/Cellm/Models/Resilience/RateLimiterConfiguration.cs
@@ -1,4 +1,4 @@
-namespace Cellm.Services.Configuration;
+namespace Cellm.Models.Resilience;
 
 public class RateLimiterConfiguration
 {
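FindPort and WaitForServer together support the start-a-local-server flow. A rough sketch, with an invented binary name, flags, and probe URL:

using System.Diagnostics;
using Cellm.Models.Local.Utilities;

// Hypothetical startup flow; executable, arguments, and probe path are placeholders.
var serverManager = new ServerManager(new HttpClient());

var port = serverManager.FindPort();

var process = Process.Start(new ProcessStartInfo("some-local-llm-server.exe", $"--port {port}"))
    ?? throw new InvalidOperationException("Could not start server process.");

// Polls until the endpoint answers 200 OK, or kills the process after the
// default 30-second timeout.
await serverManager.WaitForServer(new Uri($"http://localhost:{port}/"), process);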
diff --git a/src/Cellm/Services/Configuration/ResiliencePipelineConfigurator.cs b/src/Cellm/Models/Resilience/ResiliencePipelineConfigurator.cs
similarity index 91%
rename from src/Cellm/Services/Configuration/ResiliencePipelineConfigurator.cs
rename to src/Cellm/Models/Resilience/ResiliencePipelineConfigurator.cs
index f3b5356..c42b901 100644
--- a/src/Cellm/Services/Configuration/ResiliencePipelineConfigurator.cs
+++ b/src/Cellm/Models/Resilience/ResiliencePipelineConfigurator.cs
@@ -1,24 +1,25 @@
 using System.Net;
 using System.Threading.RateLimiting;
+using Cellm.Models.Providers;
 using Microsoft.Extensions.Configuration;
 using Polly;
 using Polly.CircuitBreaker;
 using Polly.Retry;
 using Polly.Timeout;
 
-namespace Cellm.Services.Configuration;
+namespace Cellm.Models.Resilience;
 
 public class ResiliencePipelineConfigurator
 {
-    private readonly CellmConfiguration _cellmConfiguration;
+    private readonly ProviderConfiguration _providerConfiguration;
     private readonly RateLimiterConfiguration _rateLimiterConfiguration;
     private readonly CircuitBreakerConfiguration _circuitBreakerConfiguration;
     private readonly RetryConfiguration _retryConfiguration;
 
     public ResiliencePipelineConfigurator(IConfiguration configuration)
     {
-        _cellmConfiguration = configuration.GetRequiredSection(nameof(CellmConfiguration)).Get<CellmConfiguration>()
-            ?? throw new NullReferenceException(nameof(CellmConfiguration));
+        _providerConfiguration = configuration.GetRequiredSection(nameof(ProviderConfiguration)).Get<ProviderConfiguration>()
+            ?? throw new NullReferenceException(nameof(ProviderConfiguration));
 
         _rateLimiterConfiguration = configuration.GetRequiredSection(nameof(RateLimiterConfiguration)).Get<RateLimiterConfiguration>()
            ?? throw new NullReferenceException(nameof(RateLimiterConfiguration));
@@ -61,7 +62,7 @@ public void ConfigureResiliencePipeline(ResiliencePipelineBuilder<HttpResponseMessage> builder)
@@ ... @@ public string Serialize<TSerialize>(TSerialize value, JsonSerializerOptions? options = null)
 
     public TDeserialize Deserialize<TDeserialize>(string value, JsonSerializerOptions? options = null)
     {
-        return JsonSerializer.Deserialize<TDeserialize>(value, options ?? _defaultOptions) ?? throw new CellmException($"Failed to deserialize {value} to {typeof(TDeserialize).Name}");
+        return JsonSerializer.Deserialize<TDeserialize>(value, options ?? _defaultOptions) ?? throw new CellmModelException($"Failed to deserialize {value} to {typeof(TDeserialize).Name}");
     }
 }
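For context, the configurator plugs into typed or named HttpClient registrations through Microsoft.Extensions.Http.Resilience, as the provider extension methods below do. A condensed sketch; the client name is illustrative, and services/configuration come from the composition root:

using Cellm.Models.Resilience;
using Microsoft.Extensions.DependencyInjection;

// Condensed wiring, mirroring the registrations in this changeset.
var resiliencePipelineConfigurator = new ResiliencePipelineConfigurator(configuration);

services
    .AddHttpClient("SomeProvider", httpClient => httpClient.Timeout = TimeSpan.FromHours(1))
    .AddResilienceHandler("SomeProviderResiliencePipeline", resiliencePipelineConfigurator.ConfigureResiliencePipeline);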
diff --git a/src/Cellm/Models/ServiceCollectionExtensions.cs b/src/Cellm/Models/ServiceCollectionExtensions.cs
new file mode 100644
index 0000000..9d201b5
--- /dev/null
+++ b/src/Cellm/Models/ServiceCollectionExtensions.cs
@@ -0,0 +1,158 @@
+using Cellm.Models.Behaviors;
+using Cellm.Models.Local.Utilities;
+using Cellm.Models.Providers;
+using Cellm.Models.Providers.Anthropic;
+using Cellm.Models.Providers.Ollama;
+using Cellm.Models.Providers.OpenAi;
+using Cellm.Models.Providers.OpenAiCompatible;
+using Cellm.Models.Resilience;
+using Cellm.Models.Tools;
+using MediatR;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.DependencyInjection.Extensions;
+using OpenAI;
+
+namespace Cellm.Models;
+
+public static class ServiceCollectionExtensions
+{
+    public static IServiceCollection AddAnthropicChatClient(this IServiceCollection services, IConfiguration configuration)
+    {
+        var resiliencePipelineConfigurator = new ResiliencePipelineConfigurator(configuration);
+
+        var anthropicConfiguration = configuration.GetRequiredSection(nameof(AnthropicConfiguration)).Get<AnthropicConfiguration>()
+            ?? throw new NullReferenceException(nameof(AnthropicConfiguration));
+
+        services
+            .AddHttpClient<IModelRequestHandler<AnthropicRequest, AnthropicResponse>, AnthropicRequestHandler>(anthropicHttpClient =>
+            {
+                anthropicHttpClient.BaseAddress = anthropicConfiguration.BaseAddress;
+                anthropicHttpClient.DefaultRequestHeaders.Add("x-api-key", anthropicConfiguration.ApiKey);
+                anthropicHttpClient.DefaultRequestHeaders.Add("anthropic-version", anthropicConfiguration.Version);
+                anthropicHttpClient.Timeout = TimeSpan.FromHours(1);
+            })
+            .AddResilienceHandler($"{nameof(AnthropicRequestHandler)}", resiliencePipelineConfigurator.ConfigureResiliencePipeline);
+
+        // TODO: Add IChatClient-compatible Anthropic client
+
+        return services;
+    }
+
+    public static IServiceCollection AddLlamafileChatClient(this IServiceCollection services, IConfiguration configuration)
+    {
+        services.TryAddSingleton<FileManager>();
+        services.TryAddSingleton<ProcessManager>();
+        services.TryAddSingleton<ServerManager>();
+
+        return services;
+    }
+
+    public static IServiceCollection AddOpenOllamaChatClient(this IServiceCollection services, IConfiguration configuration)
+    {
+        var resiliencePipelineConfigurator = new ResiliencePipelineConfigurator(configuration);
+
+        var ollamaConfiguration = configuration.GetRequiredSection(nameof(OllamaConfiguration)).Get<OllamaConfiguration>()
+            ?? throw new NullReferenceException(nameof(OllamaConfiguration));
+
+        services
+            .AddHttpClient(nameof(Provider.Ollama), ollamaHttpClient =>
+            {
+                ollamaHttpClient.BaseAddress = ollamaConfiguration.BaseAddress;
+                ollamaHttpClient.Timeout = TimeSpan.FromHours(1);
+            })
+            .AddResilienceHandler(
+                $"{nameof(OllamaRequestHandler)}",
+                resiliencePipelineConfigurator.ConfigureResiliencePipeline);
+
+        services
+            .AddKeyedChatClient(Provider.Ollama, serviceProvider => new OllamaChatClient(
+                ollamaConfiguration.BaseAddress,
+                ollamaConfiguration.DefaultModel,
+                serviceProvider.GetRequiredService<IHttpClientFactory>().CreateClient(nameof(Provider.Ollama))))
+            .UseFunctionInvocation();
+
+        services.TryAddSingleton<FileManager>();
+        services.TryAddSingleton<ProcessManager>();
+        services.TryAddSingleton<ServerManager>();
+
+        return services;
+    }
+
+    public static IServiceCollection AddOpenAiChatClient(this IServiceCollection services, IConfiguration configuration)
+    {
+        var openAiConfiguration = configuration.GetRequiredSection(nameof(OpenAiConfiguration)).Get<OpenAiConfiguration>()
+            ?? throw new NullReferenceException(nameof(OpenAiConfiguration));
+
+        services
+            .AddKeyedChatClient(Provider.OpenAi, new OpenAIClient(openAiConfiguration.ApiKey).AsChatClient(openAiConfiguration.DefaultModel))
+            .UseFunctionInvocation();
+
+        return services;
+    }
+
+    public static IServiceCollection AddOpenAiCompatibleChatClient(this IServiceCollection services, IConfiguration configuration)
+    {
+        var resiliencePipelineConfigurator = new ResiliencePipelineConfigurator(configuration);
+
+        services
+            .AddSingleton<OpenAiCompatibleChatClientFactory>()
+            .AddHttpClient<OpenAiCompatibleChatClientFactory>(openAiCompatibleHttpClient =>
+            {
+                openAiCompatibleHttpClient.Timeout = TimeSpan.FromHours(1);
+            })
+            .AddResilienceHandler(nameof(OpenAiCompatibleChatClientFactory), resiliencePipelineConfigurator.ConfigureResiliencePipeline);
+
+        return services;
+    }
+
+    public static IServiceCollection AddSentryBehavior(this IServiceCollection services)
+    {
+        services
+            .AddSingleton(typeof(IPipelineBehavior<,>), typeof(SentryBehavior<,>));
+
+        return services;
+    }
+
+    public static IServiceCollection AddCachingBehavior(this IServiceCollection services)
+    {
+#pragma warning disable EXTEXP0018 // Type is for evaluation purposes only and is subject to change or removal in future updates.
+        services
+            .AddHybridCache();
+#pragma warning restore EXTEXP0018 // Type is for evaluation purposes only and is subject to change or removal in future updates.
+
+        services
+            .AddSingleton(typeof(IPipelineBehavior<,>), typeof(CacheBehavior<,>));
+
+        return services;
+    }
+
+    public static IServiceCollection AddToolBehavior(this IServiceCollection services)
+    {
+        return services.AddSingleton(typeof(IPipelineBehavior<,>), typeof(ToolBehavior<,>));
+    }
+
+    public static IServiceCollection AddTools(this IServiceCollection services, params Delegate[] tools)
+    {
+        foreach (var tool in tools)
+        {
+            services.AddSingleton(AIFunctionFactory.Create(tool));
+        }
+
+        return services;
+    }
+
+    public static IServiceCollection AddTools(this IServiceCollection services, params Func<IServiceProvider, AIFunction>[] toolBuilders)
+    {
+        foreach (var toolBuilder in toolBuilders)
+        {
+            services.AddSingleton(serviceProvider => toolBuilder(serviceProvider));
+        }
+
+        return services;
+    }
+}
diff --git a/src/Cellm/Services/Configuration/CellmConfiguration.cs b/src/Cellm/Services/Configuration/CellmConfiguration.cs
index 8253ce1..5b810d9 100644
--- a/src/Cellm/Services/Configuration/CellmConfiguration.cs
+++ b/src/Cellm/Services/Configuration/CellmConfiguration.cs
@@ -3,19 +3,5 @@
 public class CellmConfiguration
 {
     public bool Debug { get; init; }
-
-    public string DefaultProvider { get; init; } = string.Empty;
-
-    public string DefaultModel { get; init; } = string.Empty;
-
-    public double DefaultTemperature { get; init; }
-
-    public int HttpTimeoutInSeconds { get; init; }
-
-    public int CacheTimeoutInSeconds { get; init; }
-
-    public bool EnableCache { get; init; }
-
-    public bool EnableTools { get; init; }
 }
diff --git a/src/Cellm/Services/Configuration/IProviderConfiguration.cs b/src/Cellm/Services/Configuration/IProviderConfiguration.cs
deleted file mode 100644
index 31c34ee..0000000
--- a/src/Cellm/Services/Configuration/IProviderConfiguration.cs
+++ /dev/null
@@ -1,8 +0,0 @@
-namespace Cellm.Services.Configuration;
-
-internal interface IProviderConfiguration
-{
-    Uri BaseAddress { get; init; }
-
-    string DefaultModel { get; init; }
-}
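The two AddTools overloads cover tools with and without dependencies. Toy examples only; neither delegate is a tool from this changeset:

using Microsoft.Extensions.AI;
using Microsoft.Extensions.DependencyInjection;

// Plain-delegate overload: each delegate is wrapped in an AIFunction singleton.
services.AddTools((string city) => $"It is always sunny in {city}");

// Factory overload: resolve dependencies from the container before creating
// the AIFunction.
services.AddTools(serviceProvider =>
{
    var httpClient = serviceProvider.GetRequiredService<HttpClient>();
    return AIFunctionFactory.Create((string url) => httpClient.GetStringAsync(url));
});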
diff --git a/src/Cellm/Services/ServiceLocator.cs b/src/Cellm/Services/ServiceLocator.cs
index 1d560e7..cbf9b6d 100644
--- a/src/Cellm/Services/ServiceLocator.cs
+++ b/src/Cellm/Services/ServiceLocator.cs
@@ -2,18 +2,18 @@
 using Cellm.AddIn;
 using Cellm.AddIn.Exceptions;
 using Cellm.Models;
-using Cellm.Models.Anthropic;
-using Cellm.Models.Llamafile;
-using Cellm.Models.Local;
-using Cellm.Models.ModelRequestBehavior;
-using Cellm.Models.Ollama;
-using Cellm.Models.OpenAi;
-using Cellm.Models.OpenAiCompatible;
+using Cellm.Models.Providers;
+using Cellm.Models.Providers.Anthropic;
+using Cellm.Models.Providers.Llamafile;
+using Cellm.Models.Providers.Ollama;
+using Cellm.Models.Providers.OpenAi;
+using Cellm.Models.Providers.OpenAiCompatible;
+using Cellm.Models.Resilience;
 using Cellm.Services.Configuration;
 using Cellm.Tools;
 using Cellm.Tools.FileReader;
 using ExcelDna.Integration;
-using MediatR;
+using Microsoft.Extensions.AI;
 using Microsoft.Extensions.Caching.Memory;
 using Microsoft.Extensions.Configuration;
 using Microsoft.Extensions.DependencyInjection;
@@ -24,6 +24,7 @@ namespace Cellm.Services;
 internal static class ServiceLocator
 {
     private static readonly Lazy<ServiceProvider> _serviceProvider = new(() => ConfigureServices(new ServiceCollection()).BuildServiceProvider());
+    internal static string? ConfigurationPath { get; set; } = ExcelDnaUtil.XllPathInfo?.Directory?.FullName;
 
     public static IServiceProvider ServiceProvider => _serviceProvider.Value;
@@ -48,6 +49,7 @@ private static IServiceCollection ConfigureServices(IServiceCollection services)
         services
             .Configure<CellmConfiguration>(configuration.GetRequiredSection(nameof(CellmConfiguration)))
+            .Configure<ProviderConfiguration>(configuration.GetRequiredSection(nameof(ProviderConfiguration)))
             .Configure<AnthropicConfiguration>(configuration.GetRequiredSection(nameof(AnthropicConfiguration)))
             .Configure<OllamaConfiguration>(configuration.GetRequiredSection(nameof(OllamaConfiguration)))
             .Configure<OpenAiConfiguration>(configuration.GetRequiredSection(nameof(OpenAiConfiguration)))
@@ -88,53 +90,35 @@ private static IServiceCollection ConfigureServices(IServiceCollection services)
 
         // Internals
         services
-            .AddSingleton(configuration)
             .AddMediatR(mediatrConfiguration => mediatrConfiguration.RegisterServicesFromAssembly(Assembly.GetExecutingAssembly()))
-            .AddTransient<PromptArgumentParser>()
+            .AddSingleton(configuration)
+            .AddTransient<ArgumentParser>()
             .AddSingleton()
-            .AddSingleton()
-            .AddSingleton()
-            .AddSingleton();
-
-#pragma warning disable EXTEXP0018 // Type is for evaluation purposes only and is subject to change or removal in future updates.
-        services
-            .AddHybridCache();
-#pragma warning restore EXTEXP0018 // Type is for evaluation purposes only and is subject to change or removal in future updates.
-
-        // Tools
-        services
-            .AddSingleton()
-            .AddSingleton()
-            .AddSingleton()
-            .AddSingleton();
-
-        // Model Providers
-
-        var resiliencePipelineConfigurator = new ResiliencePipelineConfigurator(configuration);
-
-        var anthropicConfiguration = configuration.GetRequiredSection(nameof(AnthropicConfiguration)).Get<AnthropicConfiguration>()
-            ?? throw new NullReferenceException(nameof(AnthropicConfiguration));
-
-        services
-            .AddHttpClient<IModelRequestHandler<AnthropicRequest, AnthropicResponse>, AnthropicRequestHandler>(anthropicHttpClient =>
-            {
-                anthropicHttpClient.BaseAddress = anthropicConfiguration.BaseAddress;
-                anthropicHttpClient.DefaultRequestHeaders.Add("x-api-key", anthropicConfiguration.ApiKey);
-                anthropicHttpClient.DefaultRequestHeaders.Add("anthropic-version", anthropicConfiguration.Version);
-                anthropicHttpClient.Timeout = TimeSpan.FromHours(1);
-            })
-            .AddResilienceHandler($"{nameof(AnthropicRequestHandler)}ResiliencePipeline", resiliencePipelineConfigurator.ConfigureResiliencePipeline);
+            .AddSingleton();
 
+        // Add providers
         services
+            .AddAnthropicChatClient(configuration)
+            .AddLlamafileChatClient(configuration)
             .AddOpenAiChatClient(configuration)
             .AddOpenAiCompatibleChatClient(configuration)
             .AddOpenOllamaChatClient(configuration);
 
-        // Model request pipeline
+        // Add model request middleware
         services
-            .AddSingleton(typeof(IPipelineBehavior<,>), typeof(SentryBehavior<,>))
-            .AddSingleton(typeof(IPipelineBehavior<,>), typeof(CachingBehavior<,>))
-            .AddSingleton(typeof(IPipelineBehavior<,>), typeof(ToolBehavior<,>));
+            .AddSentryBehavior()
+            .AddCachingBehavior()
+            .AddToolBehavior();
+
+        // Add tools
+        services
+            .AddSingleton()
+            .AddSingleton()
+            .AddSingleton()
+            .AddSingleton()
+            .AddTools(
+                serviceProvider => AIFunctionFactory.Create(serviceProvider.GetRequiredService().GlobRequest),
+                serviceProvider => AIFunctionFactory.Create(serviceProvider.GetRequiredService().FileReaderRequest));
 
         return services;
     }
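For orientation: the [FromKeyedServices(Provider.OpenAi)] constructor parameter on OpenAiRequestHandler resolves the keyed client that AddOpenAiChatClient registers, which is equivalent to the explicit lookup below (illustrative only, not code from this changeset):

using Cellm.Models.Providers;
using Cellm.Services;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.DependencyInjection;

// Manual equivalent of the [FromKeyedServices] injection.
var chatClient = ServiceLocator.ServiceProvider
    .GetRequiredKeyedService<IChatClient>(Provider.OpenAi);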
"DefaultModel": "gpt-4o-mini", "DefaultTemperature": 0, - "HttpTimeoutInSeconds": 600, "CacheTimeoutInSeconds": 3600, + "HttpTimeoutInSeconds": 600, "EnableCache": true, "EnableTools": true }, diff --git a/src/Cellm/packages.lock.json b/src/Cellm/packages.lock.json index 1ae3b88..a1052e4 100644 --- a/src/Cellm/packages.lock.json +++ b/src/Cellm/packages.lock.json @@ -93,6 +93,15 @@ "Microsoft.Extensions.Primitives": "9.0.0" } }, + "Microsoft.Extensions.Configuration.Abstractions": { + "type": "Direct", + "requested": "[9.0.0, )", + "resolved": "9.0.0", + "contentHash": "lqvd7W3FGKUO1+ZoUEMaZ5XDJeWvjpy2/M/ptCGz3tXLD4HWVaSzjufsAsjemasBEg+2SxXVtYVvGt5r2nKDlg==", + "dependencies": { + "Microsoft.Extensions.Primitives": "9.0.0" + } + }, "Microsoft.Extensions.Configuration.Json": { "type": "Direct", "requested": "[9.0.0, )", @@ -306,14 +315,6 @@ "Microsoft.Extensions.ObjectPool": "8.0.11" } }, - "Microsoft.Extensions.Configuration.Abstractions": { - "type": "Transitive", - "resolved": "9.0.0", - "contentHash": "lqvd7W3FGKUO1+ZoUEMaZ5XDJeWvjpy2/M/ptCGz3tXLD4HWVaSzjufsAsjemasBEg+2SxXVtYVvGt5r2nKDlg==", - "dependencies": { - "Microsoft.Extensions.Primitives": "9.0.0" - } - }, "Microsoft.Extensions.Configuration.Binder": { "type": "Transitive", "resolved": "9.0.0",