From 9cda9fafa55bb943881fe6b69826edeb3985782c Mon Sep 17 00:00:00 2001
From: Rodion Mostovoi
Date: Sat, 11 Nov 2023 00:13:07 +0800
Subject: [PATCH] Translator and StructuredResponse modules now default to GPT4 model

Changed 'ChatGPTTranslatorService' and 'OpenAiClientExtensions.GetStructuredResponse'
to use the GPT4 model by default, because GPT4 provides more stable responses than
the previous models. The 'README.md' file was also updated to reflect these changes.

Code specific to the 'gpt-3.5-turbo-1106' model has been removed from the
'OpenAiClient_GetStructuredResponse.cs' test cases as it is now redundant.

The change aims to improve translation quality and the stability of responses
in the application.
---
 README.md                                          |  7 +++++--
 ...OpenAiClientExtensions.GetStructuredResponse.cs | 14 ++++++++++----
 .../ChatGPTTranslatorService.cs                    | 10 ++++++++--
 .../OpenAiClient_GetStructuredResponse.cs          |  3 ---
 4 files changed, 23 insertions(+), 11 deletions(-)

diff --git a/README.md b/README.md
index f5ea4cf..50db8b4 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,10 @@
 [![example gif...](assets/chatgpt_console_spectre_example.gif)](samples/ChatGpt.SpectreConsoleExample/Program.cs)
-# ChatGPT integration for .NET
+# ChatGPT integration for .NET (+DI)
 [![Nuget](https://img.shields.io/nuget/v/OpenAI.ChatGPT.EntityFrameworkCore)](https://www.nuget.org/packages/OpenAI.ChatGPT.EntityFrameworkCore/)[![.NET](https://github.com/rodion-m/ChatGPT_API_dotnet/actions/workflows/dotnet.yml/badge.svg)](https://github.com/rodion-m/ChatGPT_API_dotnet/actions/workflows/dotnet.yml) \
 OpenAI Chat Completions API (ChatGPT) integration with DI and EF Core supporting. It allows you to use the API in your .NET applications. Also, the client supports streaming responses (like ChatGPT) via async streams.
 
-[NEW!] `StructuredResponse` module allows you to get structured responses from the API as C# object. See: [StructuredResponse](#structuredresponse) section.
+## 2023.11 UPD: GPT4Turbo and JSON mode support
+The `StructuredResponse` module allows you to get structured responses from the API as a C# object. See the [StructuredResponse](#structuredresponse) section.
 
 ## Content
 
@@ -125,6 +126,8 @@ var message = Dialog
 City almaty = await _client.GetStructuredResponse<City>(message);
 Console.WriteLine(almaty); // Name: "Almaty", Country: "Kazakhstan", YearOfFoundation: 1854
 ```
+Under the hood, it uses the API's new [JSON mode](https://platform.openai.com/docs/guides/text-generation/json-mode) for GPT4Turbo and `gpt-3.5-turbo-1106`. Regular GPT4 and GPT3.5Turbo models are also supported, but GPT3.5 responses may be unstable (for GPT3.5 it is strongly recommended to provide the `examples` parameter).
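+
+The snippet below is a minimal sketch of passing `examples` when targeting a GPT3.5 model. The `City` record shape, the named parameters and the exact overload are assumptions for illustration and may differ from the actual API:
+```csharp
+var message = Dialog
+    .StartAsSystem("Return the requested data about the city.")
+    .ThenUser("Almaty");
+
+// When targeting a GPT3.5 model, providing example objects helps keep the JSON output stable.
+// Parameter names and the examples overload are assumed here; check the method signature in your version.
+City almaty = await _client.GetStructuredResponse<City>(
+    message,
+    model: ChatCompletionModels.Gpt3_5_Turbo_1106,
+    examples: new[] { new City("Berlin", "Germany", 1237) }
+);
+
+// Assumed shape of the City record used in the example above (not part of the library).
+record City(string Name, string Country, int YearOfFoundation);
+```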
+
 More complex examples with arrays, nested objects and enums are available in tests: https://github.com/rodion-m/ChatGPT_API_dotnet/blob/f50d386f0b65a4ba8c1041a28bab2a1a475c2296/tests/OpenAI.ChatGpt.IntegrationTests/OpenAiClientTests/OpenAiClient_GetStructuredResponse.cs#L1
 
 NuGet: https://www.nuget.org/packages/OpenAI.ChatGPT.Modules.StructuredResponse
diff --git a/src/modules/OpenAI.ChatGpt.Modules.StructuredResponse/OpenAiClientExtensions.GetStructuredResponse.cs b/src/modules/OpenAI.ChatGpt.Modules.StructuredResponse/OpenAiClientExtensions.GetStructuredResponse.cs
index 81db9cd..73c623c 100644
--- a/src/modules/OpenAI.ChatGpt.Modules.StructuredResponse/OpenAiClientExtensions.GetStructuredResponse.cs
+++ b/src/modules/OpenAI.ChatGpt.Modules.StructuredResponse/OpenAiClientExtensions.GetStructuredResponse.cs
@@ -35,14 +35,14 @@ public static class OpenAiClientExtensions
     /// The OpenAI client.
     /// The chat dialog, including a user message and any system messages that set the behavior of the assistant.
     /// Optional. The maximum number of tokens in the response. Defaults to the limit of the model, minus the number of input tokens, minus 500.
-    /// Optional. The name of the model to use. Defaults to "text-davinci-002" unless the message input is longer than 6000 tokens, in which case it defaults to "text-davinci-003".
+    /// Optional. The name of the model to use. Defaults to ChatCompletionModels.Gpt4. It's recommended to use GPT4+.
     /// Controls the randomness of the assistant’s output. Ranges from 0.0 to 1.0, where 0.0 is deterministic and 1.0 is highly random. Default value is the default for the OpenAI API.
-    /// Optional. The user who is having the conversation. If not specified, defaults to "system".
+    /// Optional. The ID of the user who is having the conversation.
     /// Optional. A function that can modify the chat completion request before it is sent to the API.
     /// Optional. A function that can access the raw API response.
     /// Optional. Custom JSON deserializer options for the deserialization. If not specified, default options with case insensitive property names are used.
     /// Optional. Custom JSON serializer options for the serialization.
-    /// Optional. Example of the models those will be serialized using
+    /// Optional. Example objects that will be serialized using the provided JSON serializer options and included in the prompt.
     /// Optional. A cancellation token that can be used to cancel the operation.
     ///
     /// A task that represents the asynchronous operation. The task result contains the deserialized object from the API response.
@@ -114,7 +114,13 @@ internal static async Task GetStructuredResponse(
     {
         editMsg.Content += GetAdditionalJsonResponsePrompt(responseFormat, examples, jsonSerializerOptions);
 
-        (model, maxTokens) = FindOptimalModelAndMaxToken(dialog.GetMessages(), model, maxTokens);
+        (model, maxTokens) = FindOptimalModelAndMaxToken(
+            dialog.GetMessages(),
+            model,
+            maxTokens,
+            smallModel: ChatCompletionModels.Gpt4,
+            bigModel: ChatCompletionModels.Gpt4
+        );
 
         var response = await client.GetChatCompletions(
             dialog,
diff --git a/src/modules/OpenAI.ChatGpt.Modules.Translator/ChatGPTTranslatorService.cs b/src/modules/OpenAI.ChatGpt.Modules.Translator/ChatGPTTranslatorService.cs
index ee78a95..26a68a2 100644
--- a/src/modules/OpenAI.ChatGpt.Modules.Translator/ChatGPTTranslatorService.cs
+++ b/src/modules/OpenAI.ChatGpt.Modules.Translator/ChatGPTTranslatorService.cs
@@ -103,7 +103,7 @@ internal virtual string CreateTextTranslationPrompt(string sourceLanguage, strin
                "In the response write ONLY translated text." +
                (_extraPrompt is not null ? "\n" + _extraPrompt : "");
"\n" + _extraPrompt : ""); } - + public virtual async Task TranslateObject( TObject objectToTranslate, bool isBatch = false, @@ -140,7 +140,13 @@ public virtual async Task TranslateObject( var objectJson = JsonSerializer.Serialize(objectToTranslate, jsonSerializerOptions); var dialog = Dialog.StartAsSystem(prompt).ThenUser(objectJson); var messages = dialog.GetMessages().ToArray(); - (model, maxTokens) = ChatCompletionMessage.FindOptimalModelAndMaxToken(messages, model, maxTokens); + (model, maxTokens) = ChatCompletionMessage.FindOptimalModelAndMaxToken( + messages, + model, + maxTokens, + smallModel: ChatCompletionModels.Gpt4, + bigModel: ChatCompletionModels.Gpt4 + ); var response = await _client.GetStructuredResponse( dialog, maxTokens.Value, diff --git a/tests/OpenAI.ChatGpt.IntegrationTests/OpenAiClientTests/OpenAiClient_GetStructuredResponse.cs b/tests/OpenAI.ChatGpt.IntegrationTests/OpenAiClientTests/OpenAiClient_GetStructuredResponse.cs index a0fd254..d921901 100644 --- a/tests/OpenAI.ChatGpt.IntegrationTests/OpenAiClientTests/OpenAiClient_GetStructuredResponse.cs +++ b/tests/OpenAI.ChatGpt.IntegrationTests/OpenAiClientTests/OpenAiClient_GetStructuredResponse.cs @@ -27,7 +27,6 @@ public async void Get_simple_structured_response_from_ChatGPT(string model) [Theory] [InlineData(ChatCompletionModels.Gpt4Turbo)] [InlineData(ChatCompletionModels.Gpt4)] - [InlineData(ChatCompletionModels.Gpt3_5_Turbo_1106)] public async void Get_structured_response_with_ARRAY_from_ChatGPT(string model) { var message = Dialog @@ -51,7 +50,6 @@ public async void Get_structured_response_with_ARRAY_from_ChatGPT(string model) [Theory] [InlineData(ChatCompletionModels.Gpt4Turbo)] [InlineData(ChatCompletionModels.Gpt4)] - [InlineData(ChatCompletionModels.Gpt3_5_Turbo_1106)] public async void Get_structured_response_with_ENUM_from_ChatGPT(string model) { var message = Dialog @@ -65,7 +63,6 @@ public async void Get_structured_response_with_ENUM_from_ChatGPT(string model) [Theory] [InlineData(ChatCompletionModels.Gpt4Turbo)] [InlineData(ChatCompletionModels.Gpt4)] - [InlineData(ChatCompletionModels.Gpt3_5_Turbo_1106)] public async void Get_structured_response_with_extra_data_from_ChatGPT(string model) { var message = Dialog