From 4b28125ea9034ed165b6f110d45a098ed69fc24a Mon Sep 17 00:00:00 2001 From: Marco Minerva Date: Wed, 27 Mar 2024 14:44:04 +0100 Subject: [PATCH 1/4] Add logprops and top_logprops properties --- src/ChatGptNet/ChatGptClient.cs | 9 ++++-- src/ChatGptNet/ChatGptOptionsBuilder.cs | 4 ++- src/ChatGptNet/Models/ChatGptParameters.cs | 19 ++++++++++++- src/ChatGptNet/Models/ChatGptRequest.cs | 33 ++++++++++++++++------ 4 files changed, 53 insertions(+), 12 deletions(-) diff --git a/src/ChatGptNet/ChatGptClient.cs b/src/ChatGptNet/ChatGptClient.cs index 64fad43..7373e37 100644 --- a/src/ChatGptNet/ChatGptClient.cs +++ b/src/ChatGptNet/ChatGptClient.cs @@ -68,6 +68,9 @@ public async Task AskAsync(Guid conversationId, string message, var requestUri = options.ServiceConfiguration.GetChatCompletionEndpoint(model ?? options.DefaultModel); using var httpResponse = await httpClient.PostAsJsonAsync(requestUri, request, jsonSerializerOptions, cancellationToken); + var json = await httpResponse.Content.ReadAsStringAsync(cancellationToken); + Console.WriteLine(json); + var response = await httpResponse.Content.ReadFromJsonAsync(jsonSerializerOptions, cancellationToken: cancellationToken); NormalizeResponse(httpResponse, response!, conversationId, model ?? options.DefaultModel); @@ -356,8 +359,10 @@ private ChatGptRequest CreateChatGptRequest(IEnumerable messages MaxTokens = parameters?.MaxTokens ?? options.DefaultParameters.MaxTokens, PresencePenalty = parameters?.PresencePenalty ?? options.DefaultParameters.PresencePenalty, FrequencyPenalty = parameters?.FrequencyPenalty ?? options.DefaultParameters.FrequencyPenalty, - User = options.User, - ResponseFormat = parameters?.ResponseFormat ?? options.DefaultParameters.ResponseFormat + ResponseFormat = parameters?.ResponseFormat ?? options.DefaultParameters.ResponseFormat, + LogProbabilities = parameters?.LogProbabilities ?? options.DefaultParameters.LogProbabilities, + TopLogProbabilities = parameters?.TopLogProbabilities ?? options.DefaultParameters.TopLogProbabilities, + User = options.User }; private EmbeddingRequest CreateEmbeddingRequest(IEnumerable messages, EmbeddingParameters? parameters, string? model) diff --git a/src/ChatGptNet/ChatGptOptionsBuilder.cs b/src/ChatGptNet/ChatGptOptionsBuilder.cs index 327a5fd..d42f260 100644 --- a/src/ChatGptNet/ChatGptOptionsBuilder.cs +++ b/src/ChatGptNet/ChatGptOptionsBuilder.cs @@ -90,7 +90,9 @@ internal ChatGptOptionsBuilder(ChatGptOptionsBuilder source) ResponseFormat = source.DefaultParameters?.ResponseFormat, TopP = source.DefaultParameters?.TopP, Temperature = source.DefaultParameters?.Temperature, - Seed = source.DefaultParameters?.Seed + Seed = source.DefaultParameters?.Seed, + LogProbabilities = source.DefaultParameters?.LogProbabilities, + TopLogProbabilities = source.DefaultParameters?.TopLogProbabilities }; DefaultEmbeddingParameters = new() diff --git a/src/ChatGptNet/Models/ChatGptParameters.cs b/src/ChatGptNet/Models/ChatGptParameters.cs index ecd09cf..cec180d 100644 --- a/src/ChatGptNet/Models/ChatGptParameters.cs +++ b/src/ChatGptNet/Models/ChatGptParameters.cs @@ -11,7 +11,7 @@ namespace ChatGptNet.Models; public class ChatGptParameters { /// - /// If specified, the system will make a best effort to sample deterministically, such that repeated requests with the same seed and parameters should return the same result. 
+ /// Gets or sets a value such that, if specified, the system will make a best effort to sample deterministically, such that repeated requests with the same seed and parameters should return the same result. /// /// /// Determinism is not guaranteed, and you should refer to the response parameter to monitor changes in the backend. /// @@ -64,4 +64,21 @@ public class ChatGptParameters /// [JsonPropertyName("response_format")] public ChatGptResponseFormat? ResponseFormat { get; set; } + + /// + /// Gets or sets a value that determines whether to return log probabilities of the output tokens or not. If true, returns the log probabilities of each output token returned in the content of message (default: false). + /// + /// + [JsonPropertyName("logprobs")] + public bool? LogProbabilities { get; set; } + + /// + /// Gets or sets a value between 0 and 20 specifying the number of most likely tokens to return at each token position, each with an associated log probability. + /// + /// + /// LogProbabilities must be set to true if this parameter is used. + /// + /// + [JsonPropertyName("top_logprobs")] + public int? TopLogProbabilities { get; set; } } \ No newline at end of file diff --git a/src/ChatGptNet/Models/ChatGptRequest.cs b/src/ChatGptNet/Models/ChatGptRequest.cs index 2e301d9..9329194 100644 --- a/src/ChatGptNet/Models/ChatGptRequest.cs +++ b/src/ChatGptNet/Models/ChatGptRequest.cs @@ -90,7 +90,7 @@ internal class ChatGptRequest public bool Stream { get; set; } /// - /// If specified, the system will make a best effort to sample deterministically, such that repeated requests with the same seed and parameters should return the same result. + /// Gets or sets a value such that, if specified, the system will make a best effort to sample deterministically, such that repeated requests with the same seed and parameters should return the same result. /// /// /// Determinism is not guaranteed, and you should refer to the response parameter to monitor changes in the backend. /// @@ -142,17 +142,34 @@ internal class ChatGptRequest public double? FrequencyPenalty { get; set; } /// - /// Gets or sets the user identification for chat completion, which can help to monitor and detect abuse. + /// An object specifying the format that the model must output. Used to enable JSON mode. + /// + /// + [JsonPropertyName("response_format")] + public ChatGptResponseFormat? ResponseFormat { get; set; } + + /// + /// Gets or sets a value that determines whether to return log probabilities of the output tokens or not. If true, returns the log probabilities of each output token returned in the content of message (default: false). + /// + /// + [JsonPropertyName("logprobs")] + public bool? LogProbabilities { get; set; } + + /// + /// Gets or sets a value between 0 and 20 specifying the number of most likely tokens to return at each token position, each with an associated log probability. /// /// - /// See Safety best practices for more information. + /// LogProbabilities must be set to true if this parameter is used. /// - public string? User { get; set; } + /// + [JsonPropertyName("top_logprobs")] + public int? TopLogProbabilities { get; set; } /// - /// An object specifying the format that the model must output. Used to enable JSON mode. + /// Gets or sets the user identification for chat completion, which can help to monitor and detect abuse. /// - /// - [JsonPropertyName("response_format")] + /// + /// See Safety best practices for more information. + /// + public string?
User { get; set; } } \ No newline at end of file From 92d8b322b9888bfd6bd10284920c0c9b0d5491c6 Mon Sep 17 00:00:00 2001 From: Marco Minerva Date: Wed, 27 Mar 2024 15:03:05 +0100 Subject: [PATCH 2/4] Add logprops response objects --- src/ChatGptNet/Models/ChatGptChoice.cs | 8 +++++ .../Models/ChatGptLogProbability.cs | 12 +++++++ .../Models/ChatGptLogProbabilityContent.cs | 32 +++++++++++++++++++ src/ChatGptNet/Models/ChatGptParameters.cs | 2 +- src/ChatGptNet/Models/ChatGptRequest.cs | 2 +- 5 files changed, 54 insertions(+), 2 deletions(-) create mode 100644 src/ChatGptNet/Models/ChatGptLogProbability.cs create mode 100644 src/ChatGptNet/Models/ChatGptLogProbabilityContent.cs diff --git a/src/ChatGptNet/Models/ChatGptChoice.cs b/src/ChatGptNet/Models/ChatGptChoice.cs index 7a0828b..0d869bc 100644 --- a/src/ChatGptNet/Models/ChatGptChoice.cs +++ b/src/ChatGptNet/Models/ChatGptChoice.cs @@ -56,4 +56,12 @@ public class ChatGptChoice /// [JsonPropertyName("finish_reason")] public string FinishReason { get; set; } = string.Empty; + + /// + /// Gets or sets the log probabilities associated with this . + /// + /// + /// + [JsonPropertyName("logprobs")] + public ChatGptLogProbability? LogProbabilities { get; set; } } diff --git a/src/ChatGptNet/Models/ChatGptLogProbability.cs b/src/ChatGptNet/Models/ChatGptLogProbability.cs new file mode 100644 index 0000000..96ca391 --- /dev/null +++ b/src/ChatGptNet/Models/ChatGptLogProbability.cs @@ -0,0 +1,12 @@ +namespace ChatGptNet.Models; + +/// +/// Represents the log probability information of a completion choice. +/// +public class ChatGptLogProbability +{ + /// + /// Gets or sets the list of message content tokens with log probability information. + /// + public IEnumerable Content { get; set; } = []; +} \ No newline at end of file diff --git a/src/ChatGptNet/Models/ChatGptLogProbabilityContent.cs b/src/ChatGptNet/Models/ChatGptLogProbabilityContent.cs new file mode 100644 index 0000000..0e84998 --- /dev/null +++ b/src/ChatGptNet/Models/ChatGptLogProbabilityContent.cs @@ -0,0 +1,32 @@ +using System.Text.Json.Serialization; + +namespace ChatGptNet.Models; + +/// +/// Represents a message content token with log probability information. +/// +public class ChatGptLogProbabilityContent +{ + /// + /// Gets or sets the token. + /// + public string Token { get; set; } = string.Empty; + + /// + /// Gets or sets the log probability of this token, if it is within the top 20 most likely tokens. Otherwise, the value -9999.0 is used to signify that the token is very unlikely. + /// + [JsonPropertyName("logprob")] + public double LogProbality { get; set; } + + /// + /// Gets or sets a list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be if there is no bytes representation for the token. + /// + public IEnumerable? Bytes { get; set; } = []; + + /// + /// Gets or sets the list of the most likely tokens and their log probability, at this token position. In rare cases, there may be fewer than the number of requested returned. + /// + /// + [JsonPropertyName("top_logprobs")] + public IEnumerable? 
TopLogProbabilities { get; set; } +} \ No newline at end of file diff --git a/src/ChatGptNet/Models/ChatGptParameters.cs b/src/ChatGptNet/Models/ChatGptParameters.cs index cec180d..6d831e1 100644 --- a/src/ChatGptNet/Models/ChatGptParameters.cs +++ b/src/ChatGptNet/Models/ChatGptParameters.cs @@ -73,7 +73,7 @@ public class ChatGptParameters public bool? LogProbabilities { get; set; } /// - /// Gets or sets a value between 0 and 20 specifying the number of most likely tokens to return at each token position, each with an associated log probability. + /// Gets or sets a value between 0 and 5 specifying the number of most likely tokens to return at each token position, each with an associated log probability. /// /// /// must be set to if this parameter is used. diff --git a/src/ChatGptNet/Models/ChatGptRequest.cs b/src/ChatGptNet/Models/ChatGptRequest.cs index 9329194..d276894 100644 --- a/src/ChatGptNet/Models/ChatGptRequest.cs +++ b/src/ChatGptNet/Models/ChatGptRequest.cs @@ -156,7 +156,7 @@ internal class ChatGptRequest public bool? LogProbabilities { get; set; } /// - /// Gets or sets a value between 0 and 20 specifying the number of most likely tokens to return at each token position, each with an associated log probability. + /// Gets or sets a value between 0 and 5 specifying the number of most likely tokens to return at each token position, each with an associated log probability. /// /// /// must be set to if this parameter is used. From d9e9a496ddc0e2cd4ca3e3687a306234aa637fcb Mon Sep 17 00:00:00 2001 From: Marco Minerva Date: Fri, 3 May 2024 09:27:36 +0200 Subject: [PATCH 3/4] Libraries update --- samples/ChatGptApi/ChatGptApi.csproj | 4 ++-- samples/ChatGptBlazor.Wasm/ChatGptBlazor.Wasm.csproj | 8 ++++---- samples/ChatGptConsole/ChatGptConsole.csproj | 2 +- .../ChatGptFunctionCallingConsole.csproj | 2 +- samples/ChatGptStreamConsole/ChatGptStreamConsole.csproj | 2 +- src/ChatGptNet/ChatGptClient.cs | 7 ++----- 6 files changed, 11 insertions(+), 14 deletions(-) diff --git a/samples/ChatGptApi/ChatGptApi.csproj b/samples/ChatGptApi/ChatGptApi.csproj index 0408806..0245b20 100644 --- a/samples/ChatGptApi/ChatGptApi.csproj +++ b/samples/ChatGptApi/ChatGptApi.csproj @@ -7,8 +7,8 @@ - - + + diff --git a/samples/ChatGptBlazor.Wasm/ChatGptBlazor.Wasm.csproj b/samples/ChatGptBlazor.Wasm/ChatGptBlazor.Wasm.csproj index 766b7a5..2d66213 100644 --- a/samples/ChatGptBlazor.Wasm/ChatGptBlazor.Wasm.csproj +++ b/samples/ChatGptBlazor.Wasm/ChatGptBlazor.Wasm.csproj @@ -8,10 +8,10 @@ - - - - + + + + diff --git a/samples/ChatGptConsole/ChatGptConsole.csproj b/samples/ChatGptConsole/ChatGptConsole.csproj index 60ae28f..0dccab8 100644 --- a/samples/ChatGptConsole/ChatGptConsole.csproj +++ b/samples/ChatGptConsole/ChatGptConsole.csproj @@ -9,7 +9,7 @@ - + diff --git a/samples/ChatGptFunctionCallingConsole/ChatGptFunctionCallingConsole.csproj b/samples/ChatGptFunctionCallingConsole/ChatGptFunctionCallingConsole.csproj index 91451aa..6b2ab31 100644 --- a/samples/ChatGptFunctionCallingConsole/ChatGptFunctionCallingConsole.csproj +++ b/samples/ChatGptFunctionCallingConsole/ChatGptFunctionCallingConsole.csproj @@ -9,7 +9,7 @@ - + diff --git a/samples/ChatGptStreamConsole/ChatGptStreamConsole.csproj b/samples/ChatGptStreamConsole/ChatGptStreamConsole.csproj index 3645cc7..042e760 100644 --- a/samples/ChatGptStreamConsole/ChatGptStreamConsole.csproj +++ b/samples/ChatGptStreamConsole/ChatGptStreamConsole.csproj @@ -9,7 +9,7 @@ - + diff --git a/src/ChatGptNet/ChatGptClient.cs b/src/ChatGptNet/ChatGptClient.cs 
index 7373e37..78eb94a 100644 --- a/src/ChatGptNet/ChatGptClient.cs +++ b/src/ChatGptNet/ChatGptClient.cs @@ -68,9 +68,6 @@ public async Task AskAsync(Guid conversationId, string message, var requestUri = options.ServiceConfiguration.GetChatCompletionEndpoint(model ?? options.DefaultModel); using var httpResponse = await httpClient.PostAsJsonAsync(requestUri, request, jsonSerializerOptions, cancellationToken); - var json = await httpResponse.Content.ReadAsStringAsync(cancellationToken); - Console.WriteLine(json); - var response = await httpResponse.Content.ReadFromJsonAsync(jsonSerializerOptions, cancellationToken: cancellationToken); NormalizeResponse(httpResponse, response!, conversationId, model ?? options.DefaultModel); @@ -237,7 +234,7 @@ public async Task LoadConversationAsync(Guid conversationId, IEnumerable(); + var conversationHistory = await cache.GetAsync(conversationId, cancellationToken) ?? []; messages = conversationHistory.Union(messages); } @@ -253,7 +250,7 @@ public async Task AddInteractionAsync(Guid conversationId, string question, stri ArgumentNullException.ThrowIfNull(question); ArgumentNullException.ThrowIfNull(answer); - var messages = await cache.GetAsync(conversationId, cancellationToken) ?? Enumerable.Empty(); + var messages = await cache.GetAsync(conversationId, cancellationToken) ?? []; messages = messages.Union([ new() { From 7fd974b67959f097224fef366321cda64bddf15a Mon Sep 17 00:00:00 2001 From: Marco Minerva Date: Fri, 3 May 2024 09:30:35 +0200 Subject: [PATCH 4/4] Update documentation --- docs/ChatGptNet.Models/ChatGptChoice.md | 1 + .../ChatGptChoice/LogProbabilities.md | 16 +++++++++++++ .../ChatGptLogProbability.md | 21 ++++++++++++++++ .../ChatGptLogProbability.md | 14 +++++++++++ .../ChatGptLogProbability/Content.md | 15 ++++++++++++ .../ChatGptLogProbabilityContent.md | 24 +++++++++++++++++++ .../ChatGptLogProbabilityContent/Bytes.md | 14 +++++++++++ .../ChatGptLogProbabilityContent.md | 14 +++++++++++ .../LogProbality.md | 14 +++++++++++ .../ChatGptLogProbabilityContent/Token.md | 14 +++++++++++ .../TopLogProbabilities.md | 15 ++++++++++++ docs/ChatGptNet.Models/ChatGptParameters.md | 4 +++- .../ChatGptParameters/LogProbabilities.md | 15 ++++++++++++ .../ChatGptParameters/Seed.md | 2 +- .../ChatGptParameters/TopLogProbabilities.md | 19 +++++++++++++++ docs/README.md | 2 ++ 16 files changed, 202 insertions(+), 2 deletions(-) create mode 100644 docs/ChatGptNet.Models/ChatGptChoice/LogProbabilities.md create mode 100644 docs/ChatGptNet.Models/ChatGptLogProbability.md create mode 100644 docs/ChatGptNet.Models/ChatGptLogProbability/ChatGptLogProbability.md create mode 100644 docs/ChatGptNet.Models/ChatGptLogProbability/Content.md create mode 100644 docs/ChatGptNet.Models/ChatGptLogProbabilityContent.md create mode 100644 docs/ChatGptNet.Models/ChatGptLogProbabilityContent/Bytes.md create mode 100644 docs/ChatGptNet.Models/ChatGptLogProbabilityContent/ChatGptLogProbabilityContent.md create mode 100644 docs/ChatGptNet.Models/ChatGptLogProbabilityContent/LogProbality.md create mode 100644 docs/ChatGptNet.Models/ChatGptLogProbabilityContent/Token.md create mode 100644 docs/ChatGptNet.Models/ChatGptLogProbabilityContent/TopLogProbabilities.md create mode 100644 docs/ChatGptNet.Models/ChatGptParameters/LogProbabilities.md create mode 100644 docs/ChatGptNet.Models/ChatGptParameters/TopLogProbabilities.md diff --git a/docs/ChatGptNet.Models/ChatGptChoice.md b/docs/ChatGptNet.Models/ChatGptChoice.md index 92b0b1c..e2c60f9 100644 --- 
a/docs/ChatGptNet.Models/ChatGptChoice.md +++ b/docs/ChatGptNet.Models/ChatGptChoice.md @@ -16,6 +16,7 @@ public class ChatGptChoice | [FinishReason](ChatGptChoice/FinishReason.md) { get; set; } | Gets or sets a value specifying why the choice has been returned. | | [Index](ChatGptChoice/Index.md) { get; set; } | Gets or sets the index of the choice in the list. | | [IsFiltered](ChatGptChoice/IsFiltered.md) { get; } | Gets or sets a value indicating whether the this [`ChatGptChoice`](./ChatGptChoice.md) has been filtered by the content filtering system. | +| [LogProbabilities](ChatGptChoice/LogProbabilities.md) { get; set; } | Gets or sets the log probabilities associated with this [`ChatGptChoice`](./ChatGptChoice.md). | | [Message](ChatGptChoice/Message.md) { get; set; } | Gets or sets the message associated with this [`ChatGptChoice`](./ChatGptChoice.md), if any. | ## See Also diff --git a/docs/ChatGptNet.Models/ChatGptChoice/LogProbabilities.md b/docs/ChatGptNet.Models/ChatGptChoice/LogProbabilities.md new file mode 100644 index 0000000..bd169c1 --- /dev/null +++ b/docs/ChatGptNet.Models/ChatGptChoice/LogProbabilities.md @@ -0,0 +1,16 @@ +# ChatGptChoice.LogProbabilities property + +Gets or sets the log probabilities associated with this [`ChatGptChoice`](../ChatGptChoice.md). + +```csharp +public ChatGptLogProbability? LogProbabilities { get; set; } +``` + +## See Also + +* class [ChatGptLogProbability](../ChatGptLogProbability.md) +* class [ChatGptLogProbabilityContent](../ChatGptLogProbabilityContent.md) +* class [ChatGptChoice](../ChatGptChoice.md) +* namespace [ChatGptNet.Models](../../ChatGptNet.md) + + diff --git a/docs/ChatGptNet.Models/ChatGptLogProbability.md b/docs/ChatGptNet.Models/ChatGptLogProbability.md new file mode 100644 index 0000000..6f73aa9 --- /dev/null +++ b/docs/ChatGptNet.Models/ChatGptLogProbability.md @@ -0,0 +1,21 @@ +# ChatGptLogProbability class + +Represents the log probability information of a [`completion choice`](./ChatGptChoice.md). + +```csharp +public class ChatGptLogProbability +``` + +## Public Members + +| name | description | +| --- | --- | +| [ChatGptLogProbability](ChatGptLogProbability/ChatGptLogProbability.md)() | The default constructor. | +| [Content](ChatGptLogProbability/Content.md) { get; set; } | Gets or sets the list of message content tokens with log probability information. | + +## See Also + +* namespace [ChatGptNet.Models](../ChatGptNet.md) +* [ChatGptLogProbability.cs](https://github.com/marcominerva/ChatGptNet/tree/master/src/ChatGptNet/Models/ChatGptLogProbability.cs) + + diff --git a/docs/ChatGptNet.Models/ChatGptLogProbability/ChatGptLogProbability.md b/docs/ChatGptNet.Models/ChatGptLogProbability/ChatGptLogProbability.md new file mode 100644 index 0000000..4422fa3 --- /dev/null +++ b/docs/ChatGptNet.Models/ChatGptLogProbability/ChatGptLogProbability.md @@ -0,0 +1,14 @@ +# ChatGptLogProbability constructor + +The default constructor. + +```csharp +public ChatGptLogProbability() +``` + +## See Also + +* class [ChatGptLogProbability](../ChatGptLogProbability.md) +* namespace [ChatGptNet.Models](../../ChatGptNet.md) + + diff --git a/docs/ChatGptNet.Models/ChatGptLogProbability/Content.md b/docs/ChatGptNet.Models/ChatGptLogProbability/Content.md new file mode 100644 index 0000000..859b4e0 --- /dev/null +++ b/docs/ChatGptNet.Models/ChatGptLogProbability/Content.md @@ -0,0 +1,15 @@ +# ChatGptLogProbability.Content property + +Gets or sets the list of message content tokens with log probability information. 
+ +```csharp +public IEnumerable Content { get; set; } +``` + +## See Also + +* class [ChatGptLogProbabilityContent](../ChatGptLogProbabilityContent.md) +* class [ChatGptLogProbability](../ChatGptLogProbability.md) +* namespace [ChatGptNet.Models](../../ChatGptNet.md) + + diff --git a/docs/ChatGptNet.Models/ChatGptLogProbabilityContent.md b/docs/ChatGptNet.Models/ChatGptLogProbabilityContent.md new file mode 100644 index 0000000..d3918e8 --- /dev/null +++ b/docs/ChatGptNet.Models/ChatGptLogProbabilityContent.md @@ -0,0 +1,24 @@ +# ChatGptLogProbabilityContent class + +Represents a message content token with log probability information. + +```csharp +public class ChatGptLogProbabilityContent +``` + +## Public Members + +| name | description | +| --- | --- | +| [ChatGptLogProbabilityContent](ChatGptLogProbabilityContent/ChatGptLogProbabilityContent.md)() | The default constructor. | +| [Bytes](ChatGptLogProbabilityContent/Bytes.md) { get; set; } | Gets or sets a list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be `null` if there is no bytes representation for the token. | +| [LogProbality](ChatGptLogProbabilityContent/LogProbality.md) { get; set; } | Gets or sets the log probability of this token, if it is within the top 20 most likely tokens. Otherwise, the value -9999.0 is used to signify that the token is very unlikely. | +| [Token](ChatGptLogProbabilityContent/Token.md) { get; set; } | Gets or sets the token. | +| [TopLogProbabilities](ChatGptLogProbabilityContent/TopLogProbabilities.md) { get; set; } | Gets or sets the list of the most likely tokens and their log probability, at this token position. In rare cases, there may be fewer than the number of requested [`TopLogProbabilities`](./ChatGptParameters/TopLogProbabilities.md) returned. | + +## See Also + +* namespace [ChatGptNet.Models](../ChatGptNet.md) +* [ChatGptLogProbabilityContent.cs](https://github.com/marcominerva/ChatGptNet/tree/master/src/ChatGptNet/Models/ChatGptLogProbabilityContent.cs) + + diff --git a/docs/ChatGptNet.Models/ChatGptLogProbabilityContent/Bytes.md b/docs/ChatGptNet.Models/ChatGptLogProbabilityContent/Bytes.md new file mode 100644 index 0000000..b71fd33 --- /dev/null +++ b/docs/ChatGptNet.Models/ChatGptLogProbabilityContent/Bytes.md @@ -0,0 +1,14 @@ +# ChatGptLogProbabilityContent.Bytes property + +Gets or sets a list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be `null` if there is no bytes representation for the token. + +```csharp +public IEnumerable? Bytes { get; set; } +``` + +## See Also + +* class [ChatGptLogProbabilityContent](../ChatGptLogProbabilityContent.md) +* namespace [ChatGptNet.Models](../../ChatGptNet.md) + + diff --git a/docs/ChatGptNet.Models/ChatGptLogProbabilityContent/ChatGptLogProbabilityContent.md b/docs/ChatGptNet.Models/ChatGptLogProbabilityContent/ChatGptLogProbabilityContent.md new file mode 100644 index 0000000..86c289f --- /dev/null +++ b/docs/ChatGptNet.Models/ChatGptLogProbabilityContent/ChatGptLogProbabilityContent.md @@ -0,0 +1,14 @@ +# ChatGptLogProbabilityContent constructor + +The default constructor. 
+ +```csharp +public ChatGptLogProbabilityContent() +``` + +## See Also + +* class [ChatGptLogProbabilityContent](../ChatGptLogProbabilityContent.md) +* namespace [ChatGptNet.Models](../../ChatGptNet.md) + + diff --git a/docs/ChatGptNet.Models/ChatGptLogProbabilityContent/LogProbality.md b/docs/ChatGptNet.Models/ChatGptLogProbabilityContent/LogProbality.md new file mode 100644 index 0000000..2fbba3d --- /dev/null +++ b/docs/ChatGptNet.Models/ChatGptLogProbabilityContent/LogProbality.md @@ -0,0 +1,14 @@ +# ChatGptLogProbabilityContent.LogProbality property + +Gets or sets the log probability of this token, if it is within the top 20 most likely tokens. Otherwise, the value -9999.0 is used to signify that the token is very unlikely. + +```csharp +public double LogProbality { get; set; } +``` + +## See Also + +* class [ChatGptLogProbabilityContent](../ChatGptLogProbabilityContent.md) +* namespace [ChatGptNet.Models](../../ChatGptNet.md) + + diff --git a/docs/ChatGptNet.Models/ChatGptLogProbabilityContent/Token.md b/docs/ChatGptNet.Models/ChatGptLogProbabilityContent/Token.md new file mode 100644 index 0000000..75d272c --- /dev/null +++ b/docs/ChatGptNet.Models/ChatGptLogProbabilityContent/Token.md @@ -0,0 +1,14 @@ +# ChatGptLogProbabilityContent.Token property + +Gets or sets the token. + +```csharp +public string Token { get; set; } +``` + +## See Also + +* class [ChatGptLogProbabilityContent](../ChatGptLogProbabilityContent.md) +* namespace [ChatGptNet.Models](../../ChatGptNet.md) + + diff --git a/docs/ChatGptNet.Models/ChatGptLogProbabilityContent/TopLogProbabilities.md b/docs/ChatGptNet.Models/ChatGptLogProbabilityContent/TopLogProbabilities.md new file mode 100644 index 0000000..8431c45 --- /dev/null +++ b/docs/ChatGptNet.Models/ChatGptLogProbabilityContent/TopLogProbabilities.md @@ -0,0 +1,15 @@ +# ChatGptLogProbabilityContent.TopLogProbabilities property + +Gets or sets the list of the most likely tokens and their log probability, at this token position. In rare cases, there may be fewer than the number of requested [`TopLogProbabilities`](../ChatGptParameters/TopLogProbabilities.md) returned. + +```csharp +public IEnumerable? TopLogProbabilities { get; set; } +``` + +## See Also + +* property [TopLogProbabilities](../ChatGptParameters/TopLogProbabilities.md) +* class [ChatGptLogProbabilityContent](../ChatGptLogProbabilityContent.md) +* namespace [ChatGptNet.Models](../../ChatGptNet.md) + + diff --git a/docs/ChatGptNet.Models/ChatGptParameters.md b/docs/ChatGptNet.Models/ChatGptParameters.md index 55f2d51..fadb1de 100644 --- a/docs/ChatGptNet.Models/ChatGptParameters.md +++ b/docs/ChatGptNet.Models/ChatGptParameters.md @@ -12,11 +12,13 @@ public class ChatGptParameters | --- | --- | | [ChatGptParameters](ChatGptParameters/ChatGptParameters.md)() | The default constructor. | | [FrequencyPenalty](ChatGptParameters/FrequencyPenalty.md) { get; set; } | Gets or sets the frequency penalties for chat completion. Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim (default: 0). | +| [LogProbabilities](ChatGptParameters/LogProbabilities.md) { get; set; } | Gets or sets a value that determines whether to return log probabilities of the output tokens or not. If `true`, returns the log probabilities of each output token returned in the content of message (default: `false`).
| [MaxTokens](ChatGptParameters/MaxTokens.md) { get; set; } | Gets or sets the maximum number of tokens to generate in the chat completion. The total length of input tokens and generated tokens is limited by the model's context length. | | [PresencePenalty](ChatGptParameters/PresencePenalty.md) { get; set; } | Gets or sets the presence penalties for chat completion. Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics (default: 0). | | [ResponseFormat](ChatGptParameters/ResponseFormat.md) { get; set; } | An object specifying the format that the model must output. Used to enable JSON mode. | -| [Seed](ChatGptParameters/Seed.md) { get; set; } | If specified, the system will make a best effort to sample deterministically, such that repeated requests with the same seed and parameters should return the same result. | +| [Seed](ChatGptParameters/Seed.md) { get; set; } | Gets or sets a value such that, if specified, the system will make a best effort to sample deterministically, such that repeated requests with the same seed and parameters should return the same result. | | [Temperature](ChatGptParameters/Temperature.md) { get; set; } | Gets or sets what sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic (default: 1). | +| [TopLogProbabilities](ChatGptParameters/TopLogProbabilities.md) { get; set; } | Gets or sets a value between 0 and 5 specifying the number of most likely tokens to return at each token position, each with an associated log probability. | | [TopP](ChatGptParameters/TopP.md) { get; set; } | Gets or sets an alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with [`TopP`](./ChatGptParameters/TopP.md) probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered (default: 1). | ## Remarks diff --git a/docs/ChatGptNet.Models/ChatGptParameters/LogProbabilities.md b/docs/ChatGptNet.Models/ChatGptParameters/LogProbabilities.md new file mode 100644 index 0000000..9b7a601 --- /dev/null +++ b/docs/ChatGptNet.Models/ChatGptParameters/LogProbabilities.md @@ -0,0 +1,15 @@ +# ChatGptParameters.LogProbabilities property + +Gets or sets a value that determines whether to return log probabilities of the output tokens or not. If `true`, returns the log probabilities of each output token returned in the content of message (default: `false`). + +```csharp +public bool? LogProbabilities { get; set; } +``` + +## See Also + +* property [TopLogProbabilities](./TopLogProbabilities.md) +* class [ChatGptParameters](../ChatGptParameters.md) +* namespace [ChatGptNet.Models](../../ChatGptNet.md) + + diff --git a/docs/ChatGptNet.Models/ChatGptParameters/Seed.md b/docs/ChatGptNet.Models/ChatGptParameters/Seed.md index b5ed24b..c5da373 100644 --- a/docs/ChatGptNet.Models/ChatGptParameters/Seed.md +++ b/docs/ChatGptNet.Models/ChatGptParameters/Seed.md @@ -1,6 +1,6 @@ # ChatGptParameters.Seed property -If specified, the system will make a best effort to sample deterministically, such that repeated requests with the same seed and parameters should return the same result.
+Gets or sets a value such that, if specified, the system will make a best effort to sample deterministically, such that repeated requests with the same seed and parameters should return the same result. ```csharp public int? Seed { get; set; } diff --git a/docs/ChatGptNet.Models/ChatGptParameters/TopLogProbabilities.md b/docs/ChatGptNet.Models/ChatGptParameters/TopLogProbabilities.md new file mode 100644 index 0000000..fc78765 --- /dev/null +++ b/docs/ChatGptNet.Models/ChatGptParameters/TopLogProbabilities.md @@ -0,0 +1,19 @@ +# ChatGptParameters.TopLogProbabilities property + +Gets or sets a value between 0 and 5 specifying the number of most likely tokens to return at each token position, each with an associated log probability. + +```csharp +public int? TopLogProbabilities { get; set; } +``` + +## Remarks + +[`LogProbabilities`](./LogProbabilities.md) must be set to `true` if this parameter is used. + +## See Also + +* property [LogProbabilities](./LogProbabilities.md) +* class [ChatGptParameters](../ChatGptParameters.md) +* namespace [ChatGptNet.Models](../../ChatGptNet.md) + + diff --git a/docs/README.md b/docs/README.md index 06e28a5..3289e5f 100644 --- a/docs/README.md +++ b/docs/README.md @@ -47,6 +47,8 @@ | class [ChatGptFunction](./ChatGptNet.Models/ChatGptFunction.md) | Represents the description of a function available for ChatGPT. | | class [ChatGptFunctionCall](./ChatGptNet.Models/ChatGptFunctionCall.md) | Represents a response function call. | | class [ChatGptInnerError](./ChatGptNet.Models/ChatGptInnerError.md) | Contains further details about the error. | +| class [ChatGptLogProbability](./ChatGptNet.Models/ChatGptLogProbability.md) | Represents the log probability information of a [`completion choice`](./ChatGptNet.Models/ChatGptChoice.md). | +| class [ChatGptLogProbabilityContent](./ChatGptNet.Models/ChatGptLogProbabilityContent.md) | Represents a message content token with log probability information. | | class [ChatGptMessage](./ChatGptNet.Models/ChatGptMessage.md) | Represents a single chat message. | | class [ChatGptParameters](./ChatGptNet.Models/ChatGptParameters.md) | Represents chat completion parameters. | | class [ChatGptPromptFilterResults](./ChatGptNet.Models/ChatGptPromptFilterResults.md) | Contains information about content filtering for input prompts. |
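As a usage reference for the properties introduced by this patch series, the following sketch shows how log probabilities could be requested and inspected with ChatGptNet once the changes are applied. It is a hypothetical example rather than part of the diffs above: the `SetupAsync`/`AskAsync` calls and the `Choices` collection on the response are assumed from the existing library surface, and the prompt values are placeholders.

```csharp
// Hypothetical usage sketch, not part of the patch series above.
// Assumes an IChatGptClient instance (chatGptClient) resolved from dependency injection,
// the existing SetupAsync/AskAsync overloads, and a Choices collection on the response.
var conversationId = await chatGptClient.SetupAsync("You are a helpful assistant.");

var response = await chatGptClient.AskAsync(conversationId, "What is the capital of France?",
    new ChatGptParameters
    {
        LogProbabilities = true,   // sent as "logprobs"
        TopLogProbabilities = 3    // sent as "top_logprobs"; requires LogProbabilities = true
    });

// Each choice now carries a ChatGptLogProbability with one entry per generated token.
var logProbabilities = response.Choices.FirstOrDefault()?.LogProbabilities;
foreach (var tokenInfo in logProbabilities?.Content ?? [])
{
    Console.WriteLine($"{tokenInfo.Token}: {tokenInfo.LogProbality}");

    // The most likely alternatives at this position, up to TopLogProbabilities entries.
    foreach (var alternative in tokenInfo.TopLogProbabilities ?? [])
    {
        Console.WriteLine($"  {alternative.Token}: {alternative.LogProbality}");
    }
}
```

Note that `TopLogProbabilities` only takes effect when `LogProbabilities` is `true`, mirroring the `logprobs`/`top_logprobs` contract of the underlying API.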