diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..9dfb1a4b --- /dev/null +++ b/.editorconfig @@ -0,0 +1,61 @@ +# https://docs.microsoft.com/en-us/dotnet/fundamentals/code-analysis/code-style-rule-options + +# Remove the line below if you want to inherit .editorconfig settings from higher directories +root = true + +[*.cs] + +#### Core EditorConfig Options #### + +# Indentation and spacing +indent_size = 4 +indent_style = space +tab_width = 4 + +# New line preferences +end_of_line = crlf +insert_final_newline = true +trim_trailing_whitespace = true +csharp_new_line_before_catch = true +csharp_new_line_before_else = true +csharp_new_line_before_finally = true +csharp_new_line_before_open_brace = all + +# Modifier preferences +dotnet_style_require_accessibility_modifiers = for_non_interface_members:error + +# Code-block preferences +csharp_prefer_braces = true:error + +# Use language keywords for types +dotnet_style_predefined_type_for_member_access = true +dotnet_style_predefined_type_for_locals_parameters_members = true + +# Code Style +csharp_style_var_when_type_is_apparent = true +csharp_place_field_attribute_on_same_line=false +csharp_place_accessorholder_attribute_on_same_line=false +csharp_trailing_comma_in_multiline_lists=false +csharp_trailing_comma_in_singleline_lists=false +csharp_keep_existing_attribute_arrangement=false +csharp_blank_lines_around_region=1 +csharp_blank_lines_inside_region=1 +csharp_keep_blank_lines_in_code=false +csharp_remove_blank_lines_near_braces_in_code=true +csharp_blank_lines_before_control_transfer_statements=1 +csharp_blank_lines_after_control_transfer_statements=1 +csharp_blank_lines_before_block_statements=1 +csharp_blank_lines_after_block_statements=1 +csharp_blank_lines_before_multiline_statements=1 +csharp_blank_lines_after_multiline_statements=1 +csharp_blank_lines_around_block_case_section=0 +csharp_blank_lines_around_multiline_case_section=0 +csharp_blank_lines_before_case=0 
+csharp_blank_lines_after_case=0 + +#### Resharper/Rider Rules #### +# https://www.jetbrains.com/help/resharper/EditorConfig_Properties.html + +resharper_csharp_force_attribute_style=separate +resharper_use_name_of_instead_of_type_of_highlighting=error +resharper_wrong_public_modifier_specification_highlighting=error diff --git a/OpenAI-DotNet-Proxy/Proxy/AbstractAuthenticationFilter.cs b/OpenAI-DotNet-Proxy/Proxy/AbstractAuthenticationFilter.cs index bf9bf4be..44fa8829 100644 --- a/OpenAI-DotNet-Proxy/Proxy/AbstractAuthenticationFilter.cs +++ b/OpenAI-DotNet-Proxy/Proxy/AbstractAuthenticationFilter.cs @@ -1,7 +1,7 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. -using System.Threading.Tasks; using Microsoft.AspNetCore.Http; +using System.Threading.Tasks; namespace OpenAI.Proxy { diff --git a/OpenAI-DotNet-Tests-Proxy/Program.cs b/OpenAI-DotNet-Tests-Proxy/Program.cs index d0281b4c..b85c6587 100644 --- a/OpenAI-DotNet-Tests-Proxy/Program.cs +++ b/OpenAI-DotNet-Tests-Proxy/Program.cs @@ -48,4 +48,4 @@ public static void Main(string[] args) OpenAIProxyStartup.CreateWebApplication(args, openAIClient).Run(); } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet-Tests/AbstractTestFixture.cs b/OpenAI-DotNet-Tests/AbstractTestFixture.cs index 8fad8793..cf155bf5 100644 --- a/OpenAI-DotNet-Tests/AbstractTestFixture.cs +++ b/OpenAI-DotNet-Tests/AbstractTestFixture.cs @@ -38,4 +38,4 @@ protected AbstractTestFixture() }; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet-Tests/TestFixture_00_01_Authentication.cs b/OpenAI-DotNet-Tests/TestFixture_00_01_Authentication.cs index c47d3f6a..345c101a 100644 --- a/OpenAI-DotNet-Tests/TestFixture_00_01_Authentication.cs +++ b/OpenAI-DotNet-Tests/TestFixture_00_01_Authentication.cs @@ -13,10 +13,10 @@ internal class TestFixture_00_01_Authentication [SetUp] public void Setup() { - var authJson = new AuthInfo("sk-test12", "org-testOrg"); + var authJson = new 
AuthInfo("sk-test12", "org-testOrg", "proj_testProject"); var authText = JsonSerializer.Serialize(authJson); - File.WriteAllText(".openai", authText); - Assert.IsTrue(File.Exists(".openai")); + File.WriteAllText(OpenAIAuthentication.CONFIG_FILE, authText); + Assert.IsTrue(File.Exists(OpenAIAuthentication.CONFIG_FILE)); } [Test] @@ -38,12 +38,14 @@ public void Test_01_GetAuthFromEnv() [Test] public void Test_02_GetAuthFromFile() { - var auth = OpenAIAuthentication.LoadFromPath(Path.GetFullPath(".openai")); + var auth = OpenAIAuthentication.LoadFromPath(Path.GetFullPath(OpenAIAuthentication.CONFIG_FILE)); Assert.IsNotNull(auth); Assert.IsNotNull(auth.ApiKey); Assert.AreEqual("sk-test12", auth.ApiKey); Assert.IsNotNull(auth.OrganizationId); Assert.AreEqual("org-testOrg", auth.OrganizationId); + Assert.IsNotNull(auth.ProjectId); + Assert.AreEqual("proj_testProject", auth.ProjectId); } [Test] @@ -68,7 +70,7 @@ public void Test_04_GetDefault() public void Test_05_Authentication() { var defaultAuth = OpenAIAuthentication.Default; - var manualAuth = new OpenAIAuthentication("sk-testAA", "org-testAA"); + var manualAuth = new OpenAIAuthentication("sk-testAA", "org-testAA", "proj_testProject"); var api = new OpenAIClient(); var shouldBeDefaultAuth = api.OpenAIAuthentication; Assert.IsNotNull(shouldBeDefaultAuth); @@ -76,8 +78,9 @@ public void Test_05_Authentication() Assert.IsNotNull(shouldBeDefaultAuth.OrganizationId); Assert.AreEqual(defaultAuth.ApiKey, shouldBeDefaultAuth.ApiKey); Assert.AreEqual(defaultAuth.OrganizationId, shouldBeDefaultAuth.OrganizationId); + Assert.AreEqual(defaultAuth.ProjectId, shouldBeDefaultAuth.ProjectId); - OpenAIAuthentication.Default = new OpenAIAuthentication("sk-testAA", "org-testAA"); + OpenAIAuthentication.Default = new OpenAIAuthentication("sk-testAA", "org-testAA", "proj_testProject"); api = new OpenAIClient(); var shouldBeManualAuth = api.OpenAIAuthentication; Assert.IsNotNull(shouldBeManualAuth); @@ -85,6 +88,7 @@ public void 
Test_05_Authentication() Assert.IsNotNull(shouldBeManualAuth.OrganizationId); Assert.AreEqual(manualAuth.ApiKey, shouldBeManualAuth.ApiKey); Assert.AreEqual(manualAuth.OrganizationId, shouldBeManualAuth.OrganizationId); + Assert.AreEqual(manualAuth.ProjectId, shouldBeDefaultAuth.ProjectId); OpenAIAuthentication.Default = defaultAuth; } @@ -181,12 +185,12 @@ public void Test_12_CustomDomainConfigurationSettings() [TearDown] public void TearDown() { - if (File.Exists(".openai")) + if (File.Exists(OpenAIAuthentication.CONFIG_FILE)) { - File.Delete(".openai"); + File.Delete(OpenAIAuthentication.CONFIG_FILE); } - Assert.IsFalse(File.Exists(".openai")); + Assert.IsFalse(File.Exists(OpenAIAuthentication.CONFIG_FILE)); } } } diff --git a/OpenAI-DotNet-Tests/TestFixture_00_02_Tools.cs b/OpenAI-DotNet-Tests/TestFixture_00_02_Tools.cs index dbb291c3..d14e0193 100644 --- a/OpenAI-DotNet-Tests/TestFixture_00_02_Tools.cs +++ b/OpenAI-DotNet-Tests/TestFixture_00_02_Tools.cs @@ -35,7 +35,8 @@ public async Task Test_02_Tool_Funcs() { Tool.FromFunc("test_func", Function), Tool.FromFunc("test_func_with_args", FunctionWithArgs), - Tool.FromFunc("test_func_weather", () => WeatherService.GetCurrentWeatherAsync("my location", WeatherService.WeatherUnit.Celsius)) + Tool.FromFunc("test_func_weather", () => WeatherService.GetCurrentWeatherAsync("my location", WeatherService.WeatherUnit.Celsius)), + Tool.FromFunc, string>("test_func_with_array_args", FunctionWithArrayArgs) }; var json = JsonSerializer.Serialize(tools, new JsonSerializerOptions(OpenAIClient.JsonSerializationOptions) @@ -48,6 +49,7 @@ public async Task Test_02_Tool_Funcs() Assert.IsNotNull(tool); var result = tool.InvokeFunction(); Assert.AreEqual("success", result); + var toolWithArgs = tools[1]; Assert.IsNotNull(toolWithArgs); toolWithArgs.Function.Arguments = new JsonObject @@ -63,6 +65,16 @@ public async Task Test_02_Tool_Funcs() var resultWeather = await toolWeather.InvokeFunctionAsync(); 
Assert.IsFalse(string.IsNullOrWhiteSpace(resultWeather)); Console.WriteLine(resultWeather); + + var toolWithArrayArgs = tools[3]; + Assert.IsNotNull(toolWithArrayArgs); + toolWithArrayArgs.Function.Arguments = new JsonObject + { + ["args"] = new JsonArray { 1, 2, 3, 4, 5 } + }; + var resultWithArrayArgs = toolWithArrayArgs.InvokeFunction(); + Assert.AreEqual("1, 2, 3, 4, 5", resultWithArrayArgs); + Console.WriteLine(resultWithArrayArgs); } private string Function() @@ -75,6 +87,11 @@ private string FunctionWithArgs(string arg1, string arg2) return $"{arg1} {arg2}"; } + private string FunctionWithArrayArgs(List args) + { + return string.Join(", ", args); + } + [Test] public void Test_03_Tool_works_when_called_concurrently() { @@ -100,4 +117,4 @@ async Task Test(int id) } } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet-Tests/TestFixture_02_Assistants.cs b/OpenAI-DotNet-Tests/TestFixture_02_Assistants.cs index ea199cff..5a57c87c 100644 --- a/OpenAI-DotNet-Tests/TestFixture_02_Assistants.cs +++ b/OpenAI-DotNet-Tests/TestFixture_02_Assistants.cs @@ -2,6 +2,7 @@ using NUnit.Framework; using OpenAI.Assistants; +using OpenAI.Files; using OpenAI.Models; using System; using System.Collections.Generic; @@ -21,32 +22,53 @@ public async Task Test_01_CreateAssistant() const string testFilePath = "assistant_test_1.txt"; await File.WriteAllTextAsync(testFilePath, "Knowledge is power!"); Assert.IsTrue(File.Exists(testFilePath)); - var file = await OpenAIClient.FilesEndpoint.UploadFileAsync(testFilePath, "assistants"); - File.Delete(testFilePath); - Assert.IsFalse(File.Exists(testFilePath)); - var request = new CreateAssistantRequest(Model.GPT3_5_Turbo, - name: "test-assistant", - description: "Used for unit testing.", - instructions: "You are test assistant", - metadata: new Dictionary + FileResponse file = null; + + try + { + try { - ["int"] = "1", - ["test"] = Guid.NewGuid().ToString() - }, - tools: new[] + file = await 
OpenAIClient.FilesEndpoint.UploadFileAsync(testFilePath, FilePurpose.Assistants); + } + finally { - Tool.Retrieval - }, - files: new[] { file.Id }); - var assistant = await OpenAIClient.AssistantsEndpoint.CreateAssistantAsync(request); - Assert.IsNotNull(assistant); - Assert.AreEqual("test-assistant", assistant.Name); - Assert.AreEqual("Used for unit testing.", assistant.Description); - Assert.AreEqual("You are test assistant", assistant.Instructions); - Assert.AreEqual(Model.GPT3_5_Turbo.ToString(), assistant.Model); - Assert.IsNotEmpty(assistant.Metadata); - testAssistant = assistant; - Console.WriteLine($"{assistant} -> {assistant.Metadata["test"]}"); + if (File.Exists(testFilePath)) + { + File.Delete(testFilePath); + } + + Assert.IsFalse(File.Exists(testFilePath)); + } + + var request = new CreateAssistantRequest(Model.GPT4_Turbo, + name: "test-assistant", + description: "Used for unit testing.", + instructions: "You are test assistant", + toolResources: new FileSearchResources(new List { file.Id }), + metadata: new Dictionary + { + ["int"] = "1", + ["test"] = Guid.NewGuid().ToString() + }, + tools: new[] { Tool.FileSearch }); + var assistant = await OpenAIClient.AssistantsEndpoint.CreateAssistantAsync(request); + Assert.IsNotNull(assistant); + Assert.AreEqual("test-assistant", assistant.Name); + Assert.AreEqual("Used for unit testing.", assistant.Description); + Assert.AreEqual("You are test assistant", assistant.Instructions); + Assert.AreEqual(Model.GPT4_Turbo.ToString(), assistant.Model); + Assert.IsNotEmpty(assistant.Metadata); + testAssistant = assistant; + Console.WriteLine($"{assistant} -> {assistant.Metadata["test"]}"); + } + finally + { + if (file != null) + { + var isDeleted = await OpenAIClient.FilesEndpoint.DeleteFileAsync(file); + Assert.IsTrue(isDeleted); + } + } } [Test] @@ -70,90 +92,18 @@ public async Task Test_03_ModifyAssistants() { Assert.IsNotNull(testAssistant); Assert.IsNotNull(OpenAIClient.AssistantsEndpoint); - var request = new 
CreateAssistantRequest( - model: Model.GPT4_Turbo, + var assistant = await testAssistant.ModifyAsync(new( + model: Model.GPT4o, name: "Test modified", description: "Modified description", - instructions: "You are modified test assistant"); - var assistant = await testAssistant.ModifyAsync(request); + instructions: "You are modified test assistant")); Assert.IsNotNull(assistant); Assert.AreEqual("Test modified", assistant.Name); Assert.AreEqual("Modified description", assistant.Description); Assert.AreEqual("You are modified test assistant", assistant.Instructions); - Assert.AreEqual(Model.GPT4_Turbo.ToString(), assistant.Model); + Assert.AreEqual(Model.GPT4o.ToString(), assistant.Model); Assert.IsTrue(assistant.Metadata.ContainsKey("test")); - Console.WriteLine($"{assistant.Id} -> modified"); - } - - [Test] - public async Task Test_04_01_UploadAssistantFile() - { - Assert.IsNotNull(testAssistant); - Assert.IsNotNull(OpenAIClient.AssistantsEndpoint); - const string testFilePath = "assistant_test_2.txt"; - await File.WriteAllTextAsync(testFilePath, "Knowledge is power!"); - Assert.IsTrue(File.Exists(testFilePath)); - var file = await testAssistant.UploadFileAsync(testFilePath); - Assert.IsNotNull(file); - Console.WriteLine($"uploaded -> {file.Id}"); - } - - [Test] - public async Task Test_04_02_ListAssistantFiles() - { - Assert.IsNotNull(testAssistant); - Assert.IsNotNull(OpenAIClient.AssistantsEndpoint); - var filesList = await testAssistant.ListFilesAsync(); - Assert.IsNotNull(filesList); - Assert.IsNotEmpty(filesList.Items); - Assert.IsTrue(filesList.Items.Count == 2); - - foreach (var file in filesList.Items) - { - Assert.IsNotNull(file); - var retrieved = await testAssistant.RetrieveFileAsync(file); - Assert.IsNotNull(retrieved); - Assert.IsTrue(retrieved.Id == file.Id); - Console.WriteLine($"{retrieved.AssistantId}'s file -> {retrieved.Id}"); - // TODO 400 Bad Request error when attempting to download assistant files. Likely OpenAI bug. 
- //var downloadPath = await retrieved.DownloadFileAsync(Directory.GetCurrentDirectory(), true); - //Console.WriteLine($"downloaded {retrieved} -> {downloadPath}"); - //Assert.IsTrue(File.Exists(downloadPath)); - //File.Delete(downloadPath); - //Assert.IsFalse(File.Exists(downloadPath)); - } - } - - [Test] - public async Task Test_04_03_RemoveAssistantFile() - { - Assert.IsNotNull(testAssistant); - Assert.IsNotNull(OpenAIClient.AssistantsEndpoint); - var filesList = await testAssistant.ListFilesAsync(); - Assert.IsNotNull(filesList); - Assert.IsNotEmpty(filesList.Items); - Assert.IsTrue(filesList.Items.Count == 2); - var assistantFile = filesList.Items[0]; - Assert.IsNotNull(assistantFile); - var isRemoved = await testAssistant.RemoveFileAsync(assistantFile); - Assert.IsTrue(isRemoved); - var isDeleted = await OpenAIClient.FilesEndpoint.DeleteFileAsync(assistantFile); - Assert.IsTrue(isDeleted); - } - - [Test] - public async Task Test_04_04_DeleteAssistantFiles() - { - Assert.IsNotNull(testAssistant); - Assert.IsNotNull(OpenAIClient.AssistantsEndpoint); - var filesList = await testAssistant.ListFilesAsync(); - Assert.IsNotNull(filesList); - Assert.IsNotEmpty(filesList.Items); - Assert.IsTrue(filesList.Items.Count == 1); - var assistantFile = filesList.Items[0]; - Assert.IsNotNull(assistantFile); - var isDeleted = await testAssistant.DeleteFileAsync(assistantFile); - Assert.IsTrue(isDeleted); + Console.WriteLine($"modified assistant -> {assistant.Id}"); } [Test] @@ -161,9 +111,9 @@ public async Task Test_05_DeleteAssistant() { Assert.IsNotNull(testAssistant); Assert.IsNotNull(OpenAIClient.AssistantsEndpoint); - var result = await testAssistant.DeleteAsync(); + var result = await testAssistant.DeleteAsync(deleteToolResources: true); Assert.IsTrue(result); - Console.WriteLine($"{testAssistant.Id} -> deleted"); + Console.WriteLine($"deleted assistant -> {testAssistant.Id}"); } } -} \ No newline at end of file +} diff --git 
a/OpenAI-DotNet-Tests/TestFixture_03_Threads.cs b/OpenAI-DotNet-Tests/TestFixture_03_Threads.cs index d8780858..5b7abaf7 100644 --- a/OpenAI-DotNet-Tests/TestFixture_03_Threads.cs +++ b/OpenAI-DotNet-Tests/TestFixture_03_Threads.cs @@ -10,6 +10,7 @@ using System.Collections.Generic; using System.IO; using System.Linq; +using System.Net.Http; using System.Threading.Tasks; namespace OpenAI.Tests @@ -28,12 +29,12 @@ internal class TestFixture_03_Threads : AbstractTestFixture public async Task Test_01_CreateThread() { Assert.IsNotNull(OpenAIClient.ThreadsEndpoint); - var thread = await OpenAIClient.ThreadsEndpoint.CreateThreadAsync(new CreateThreadRequest( - new List + var thread = await OpenAIClient.ThreadsEndpoint.CreateThreadAsync(new( + messages: new List { "Test message" }, - new Dictionary + metadata: new Dictionary { ["test"] = nameof(Test_01_CreateThread) })); @@ -80,21 +81,35 @@ public async Task Test_04_01_CreateMessage() const string testFilePath = "assistant_test_1.txt"; await File.WriteAllTextAsync(testFilePath, "Knowledge is power!"); Assert.IsTrue(File.Exists(testFilePath)); - var file = await OpenAIClient.FilesEndpoint.UploadFileAsync(testFilePath, "assistants"); - Assert.NotNull(file); - File.Delete(testFilePath); - Assert.IsFalse(File.Exists(testFilePath)); - await testThread.CreateMessageAsync("hello world!"); - var request = new CreateMessageRequest("Test create message", - new[] { file.Id }, - new Dictionary - { - ["test"] = nameof(Test_04_01_CreateMessage) - }); + FileResponse file = null; MessageResponse message; + try { - message = await testThread.CreateMessageAsync(request); + try + { + file = await OpenAIClient.FilesEndpoint.UploadFileAsync(testFilePath, FilePurpose.Assistants); + Assert.NotNull(file); + } + finally + { + if (File.Exists(testFilePath)) + { + File.Delete(testFilePath); + } + + Assert.IsFalse(File.Exists(testFilePath)); + } + + message = await testThread.CreateMessageAsync("hello world!"); + Assert.IsNotNull(message); + message 
= await testThread.CreateMessageAsync(new( + content: "Test create message", + attachments: new[] { new Attachment(file.Id, Tool.FileSearch) }, + metadata: new Dictionary + { + ["test"] = nameof(Test_04_01_CreateMessage) + })); } finally { @@ -153,97 +168,175 @@ public async Task Test_04_03_ModifyMessage() } [Test] - public async Task Test_04_04_UploadAndDownloadMessageFiles() + public async Task Test_05_DeleteThread() { Assert.IsNotNull(testThread); Assert.IsNotNull(OpenAIClient.ThreadsEndpoint); - var file1 = await CreateTestFileAsync("test_1.txt"); - var file2 = await CreateTestFileAsync("test_2.txt"); + var isDeleted = await testThread.DeleteAsync(deleteToolResources: true); + Assert.IsTrue(isDeleted); + Console.WriteLine($"Deleted thread -> {testThread.Id}"); + } + + [Test] + public async Task Test_06_01_01_CreateRun() + { + Assert.NotNull(OpenAIClient.ThreadsEndpoint); + var assistant = await OpenAIClient.AssistantsEndpoint.CreateAssistantAsync( + new CreateAssistantRequest( + name: "Math Tutor", + instructions: "You are a personal math tutor. Answer questions briefly, in a sentence or less. Your responses should be formatted in JSON.", + model: Model.GPT4o, + responseFormat: ChatResponseFormat.Json)); + Assert.NotNull(assistant); + var thread = await OpenAIClient.ThreadsEndpoint.CreateThreadAsync(); + try { - var createRequest = new CreateMessageRequest("Test content with files", new[] { file1.Id, file2.Id }); - var message = await testThread.CreateMessageAsync(createRequest); - var fileList = await message.ListFilesAsync(); - Assert.IsNotNull(fileList); - Assert.AreEqual(2, fileList.Items.Count); + Assert.NotNull(thread); + var message = await thread.CreateMessageAsync("I need to solve the equation `3x + 11 = 14`. 
Can you help me?"); + Assert.NotNull(message); + var run = await thread.CreateRunAsync(assistant); + Assert.IsNotNull(run); + run = await run.WaitForStatusChangeAsync(); + Assert.IsNotNull(run); + Assert.IsTrue(run.Status == RunStatus.Completed); + var messages = await thread.ListMessagesAsync(); - foreach (var file in fileList.Items) + foreach (var response in messages.Items) { - var retrieved = await message.RetrieveFileAsync(file); - Assert.IsNotNull(retrieved); - Console.WriteLine(file.Id); - // TODO 400 bad request errors. Likely OpenAI bug downloading message file content. - //var filePath = await message.DownloadFileContentAsync(file, Directory.GetCurrentDirectory(), true); - //Assert.IsFalse(string.IsNullOrWhiteSpace(filePath)); - //Assert.IsTrue(File.Exists(filePath)); - //File.Delete(filePath); + Console.WriteLine($"{response.Role}: {response.PrintContent()}"); } - - var threadList = await testThread.ListFilesAsync(message); - Assert.IsNotNull(threadList); - Assert.IsNotEmpty(threadList.Items); - - //foreach (var file in threadList.Items) - //{ - // // TODO 400 bad request errors. Likely OpenAI bug downloading message file content. 
- // var filePath = await file.DownloadContentAsync(Directory.GetCurrentDirectory(), true); - // Assert.IsFalse(string.IsNullOrWhiteSpace(filePath)); - // Assert.IsTrue(File.Exists(filePath)); - // File.Delete(filePath); - //} } finally { - await CleanupFileAsync(file1); - await CleanupFileAsync(file2); + await assistant.DeleteAsync(); + await thread.DeleteAsync(deleteToolResources: true); } } [Test] - public async Task Test_05_DeleteThread() - { - Assert.IsNotNull(testThread); - Assert.IsNotNull(OpenAIClient.ThreadsEndpoint); - var isDeleted = await testThread.DeleteAsync(); - Assert.IsTrue(isDeleted); - Console.WriteLine($"Deleted thread {testThread.Id}"); - } - - [Test] - public async Task Test_06_01_CreateRun() + public async Task Test_06_01_02_CreateStreamingRun() { Assert.NotNull(OpenAIClient.ThreadsEndpoint); var assistant = await OpenAIClient.AssistantsEndpoint.CreateAssistantAsync( new CreateAssistantRequest( name: "Math Tutor", - instructions: "You are a personal math tutor. Answer questions briefly, in a sentence or less.", - model: Model.GPT4_Turbo)); + instructions: "You are a personal math tutor. Answer questions briefly, in a sentence or less. Your responses should be formatted in JSON.", + model: Model.GPT4o, + responseFormat: ChatResponseFormat.Json)); Assert.NotNull(assistant); - testAssistant = assistant; var thread = await OpenAIClient.ThreadsEndpoint.CreateThreadAsync(); - Assert.NotNull(thread); try { + Assert.NotNull(thread); var message = await thread.CreateMessageAsync("I need to solve the equation `3x + 11 = 14`. 
Can you help me?"); Assert.NotNull(message); - var run = await thread.CreateRunAsync(assistant); + + var run = await thread.CreateRunAsync(assistant, streamEvent => + { + Console.WriteLine(streamEvent.ToJsonString()); + }); + + Assert.IsNotNull(run); + Assert.IsTrue(run.Status == RunStatus.Completed); + var messages = await thread.ListMessagesAsync(); + + foreach (var response in messages.Items.Reverse()) + { + Console.WriteLine($"{response.Role}: {response.PrintContent()}"); + } + } + finally + { + await assistant.DeleteAsync(); + await thread.DeleteAsync(deleteToolResources: true); + } + } + + [Test] + public async Task Test_06_01_03_CreateStreamingRun_ToolCalls() + { + Assert.NotNull(OpenAIClient.ThreadsEndpoint); + var tools = new List + { + Tool.GetOrCreateTool(typeof(WeatherService), nameof(WeatherService.GetCurrentWeatherAsync)) + }; + var assistantRequest = new CreateAssistantRequest(tools: tools, instructions: "You are a helpful weather assistant. Use the appropriate unit based on geographical location."); + var assistant = await OpenAIClient.AssistantsEndpoint.CreateAssistantAsync(assistantRequest); + Assert.NotNull(assistant); + ThreadResponse thread = null; + + try + { + async void StreamEventHandler(IServerSentEvent streamEvent) + { + try + { + switch (streamEvent) + { + case ThreadResponse threadResponse: + thread = threadResponse; + break; + case RunResponse runResponse: + if (runResponse.Status == RunStatus.RequiresAction) + { + var toolOutputs = await assistant.GetToolOutputsAsync(runResponse); + + foreach (var toolOutput in toolOutputs) + { + Console.WriteLine($"Tool Output: {toolOutput}"); + } + + await runResponse.SubmitToolOutputsAsync(toolOutputs, StreamEventHandler); + } + break; + default: + Console.WriteLine(streamEvent.ToJsonString()); + break; + } + } + catch (Exception e) + { + Console.WriteLine(e); + } + } + + var run = await assistant.CreateThreadAndRunAsync("I'm in Kuala-Lumpur, please tell me what's the temperature now?", 
StreamEventHandler); + Assert.NotNull(thread); Assert.IsNotNull(run); run = await run.WaitForStatusChangeAsync(); Assert.IsNotNull(run); Assert.IsTrue(run.Status == RunStatus.Completed); + var messages = await thread.ListMessagesAsync(); + + foreach (var response in messages.Items.Reverse()) + { + Console.WriteLine($"{response.Role}: {response.PrintContent()}"); + } } finally { - await thread.DeleteAsync(); + if (thread != null) + { + await thread.DeleteAsync(); + } + + await assistant.DeleteAsync(deleteToolResources: true); } } [Test] public async Task Test_06_02_CreateThreadAndRun() { - Assert.NotNull(testAssistant); Assert.NotNull(OpenAIClient.ThreadsEndpoint); + testAssistant = await OpenAIClient.AssistantsEndpoint.CreateAssistantAsync( + new CreateAssistantRequest( + name: "Math Tutor", + instructions: "You are a personal math tutor. Answer questions briefly, in a sentence or less. Your responses should be formatted in JSON.", + model: Model.GPT4o, + responseFormat: ChatResponseFormat.Json)); + Assert.NotNull(testAssistant); var messages = new List { "I need to solve the equation `3x + 11 = 14`. Can you help me?" 
}; var threadRequest = new CreateThreadRequest(messages); var run = await testAssistant.CreateThreadAndRunAsync(threadRequest); @@ -282,7 +375,7 @@ public async Task Test_06_04_ModifyRun() { Assert.NotNull(testRun); Assert.NotNull(OpenAIClient.ThreadsEndpoint); - // run in Queued and InProgress can't be modified + // a run that is Queued or InProgress can't be modified var run = await testRun.WaitForStatusChangeAsync(); Assert.IsNotNull(run); Assert.IsTrue(run.Status == RunStatus.Completed); @@ -304,26 +397,34 @@ public async Task Test_06_05_CancelRun() Assert.IsNotNull(testThread); Assert.IsNotNull(testAssistant); Assert.NotNull(OpenAIClient.ThreadsEndpoint); - var run = await testThread.CreateRunAsync(testAssistant); + var run = await testThread.CreateRunAsync(new CreateRunRequest(testAssistant)); Assert.IsNotNull(run); Assert.IsTrue(run.Status == RunStatus.Queued); - run = await run.CancelAsync(); - Assert.IsNotNull(run); - Assert.IsTrue(run.Status == RunStatus.Cancelling); try { - // waiting while run is cancelling - run = await run.WaitForStatusChangeAsync(); + var runCancelled = await run.CancelAsync(); + Assert.IsNotNull(runCancelled); + Assert.IsTrue(runCancelled); } catch (Exception e) { // Sometimes runs will get stuck in Cancelling state, - // for now we just log when it happens. + // or will say it is already cancelled, but it was not, + // so for now we just log when it happens. 
Console.WriteLine(e); + + if (e is HttpRequestException httpException) + { + if (!httpException.Message.Contains("Cannot cancel run with status")) + { + throw; + } + } } - Assert.IsTrue(run.Status is RunStatus.Cancelled or RunStatus.Cancelling); + run = await run.UpdateAsync(); + Assert.IsTrue(run.Status is RunStatus.Cancelled or RunStatus.Cancelling or RunStatus.Completed); } [Test] @@ -337,7 +438,7 @@ public async Task Test_06_06_TestCleanup() if (testThread != null) { - var isDeleted = await testThread.DeleteAsync(); + var isDeleted = await testThread.DeleteAsync(deleteToolResources: true); Assert.IsTrue(isDeleted); } } @@ -372,7 +473,7 @@ public async Task Test_07_01_SubmitToolOutput() Assert.IsNotNull(runStep.Client); var retrievedRunStep = await runStep.UpdateAsync(); Assert.IsNotNull(retrievedRunStep); - Console.WriteLine($"[{runStep.Id}] {runStep.Status} {runStep.CreatedAt} -> {runStep.ExpiresAt}"); + Console.WriteLine($"[{runStep.Id}] {runStep.Status} {runStep.CreatedAt} -> {runStep.ExpiredAt}"); var retrieveStepRunStep = await run.RetrieveRunStepAsync(runStep.Id); Assert.IsNotNull(retrieveStepRunStep); } @@ -386,7 +487,7 @@ public async Task Test_07_01_SubmitToolOutput() Console.WriteLine($"tool call arguments: {toolCall.FunctionCall.Arguments}"); // Invoke all the tool call functions and return the tool outputs. - var toolOutputs = await testAssistant.GetToolOutputsAsync(run.RequiredAction.SubmitToolOutputs.ToolCalls); + var toolOutputs = await testAssistant.GetToolOutputsAsync(run); foreach (var toolOutput in toolOutputs) { @@ -405,7 +506,7 @@ public async Task Test_07_01_SubmitToolOutput() Assert.IsNotNull(runStep.Client); var retrievedRunStep = await runStep.UpdateAsync(); Assert.IsNotNull(retrievedRunStep); - Console.WriteLine($"[{runStep.Id}] {runStep.Status} {runStep.CreatedAt} -> {(runStep.ExpiresAtUnixTimeSeconds.HasValue ? 
runStep.ExpiresAt : runStep.CompletedAt)}"); + Console.WriteLine($"[{runStep.Id}] {runStep.Status} {runStep.CreatedAt} -> {(runStep.ExpiredAtUnixTimeSeconds.HasValue ? runStep.ExpiredAt : runStep.CompletedAt)}"); if (runStep.StepDetails.ToolCalls == null) { continue; } foreach (var runStepToolCall in runStep.StepDetails.ToolCalls) @@ -437,7 +538,7 @@ public async Task Test_07_02_TestCleanup() if (testThread != null) { - var isDeleted = await testThread.DeleteAsync(); + var isDeleted = await testThread.DeleteAsync(deleteToolResources: true); Assert.IsTrue(isDeleted); } } @@ -446,7 +547,7 @@ private async Task CreateTestFileAsync(string filePath) { await File.WriteAllTextAsync(filePath, "Knowledge is power!"); Assert.IsTrue(File.Exists(filePath)); - var file = await OpenAIClient.FilesEndpoint.UploadFileAsync(filePath, "assistants"); + var file = await OpenAIClient.FilesEndpoint.UploadFileAsync(filePath, FilePurpose.Assistants); File.Delete(filePath); Assert.IsFalse(File.Exists(filePath)); return file; @@ -454,8 +555,9 @@ private async Task CreateTestFileAsync(string filePath) private async Task CleanupFileAsync(FileResponse file) { + if (file == null) { return; } var isDeleted = await OpenAIClient.FilesEndpoint.DeleteFileAsync(file); Assert.IsTrue(isDeleted); } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet-Tests/TestFixture_04_Chat.cs b/OpenAI-DotNet-Tests/TestFixture_04_Chat.cs index 91cf541e..a0e1ae22 100644 --- a/OpenAI-DotNet-Tests/TestFixture_04_Chat.cs +++ b/OpenAI-DotNet-Tests/TestFixture_04_Chat.cs @@ -22,9 +22,9 @@ public async Task Test_01_01_GetChatCompletion() new(Role.System, "You are a helpful assistant."), new(Role.User, "Who won the world series in 2020?"), new(Role.Assistant, "The Los Angeles Dodgers won the World Series in 2020."), - new(Role.User, "Where was it played?"), + new(Role.User, "Where was it played?") }; - var chatRequest = new ChatRequest(messages, Model.GPT4_Turbo); + var chatRequest = new ChatRequest(messages, 
Model.GPT4o); var response = await OpenAIClient.ChatEndpoint.GetCompletionAsync(chatRequest); Assert.IsNotNull(response); Assert.IsNotNull(response.Choices); @@ -47,13 +47,14 @@ public async Task Test_01_02_GetChatStreamingCompletion() new(Role.System, "You are a helpful assistant."), new(Role.User, "Who won the world series in 2020?"), new(Role.Assistant, "The Los Angeles Dodgers won the World Series in 2020."), - new(Role.User, "Where was it played?"), + new(Role.User, "Where was it played?") }; var chatRequest = new ChatRequest(messages); var cumulativeDelta = string.Empty; var response = await OpenAIClient.ChatEndpoint.StreamCompletionAsync(chatRequest, partialResponse => { Assert.IsNotNull(partialResponse); + if (partialResponse.Usage != null) { return; } Assert.NotNull(partialResponse.Choices); Assert.NotZero(partialResponse.Choices.Count); @@ -61,7 +62,7 @@ public async Task Test_01_02_GetChatStreamingCompletion() { cumulativeDelta += choice.Delta.Content; } - }); + }, true); Assert.IsNotNull(response); Assert.IsNotNull(response.Choices); var choice = response.FirstChoice; @@ -84,7 +85,7 @@ public async Task Test_01_03_JsonMode() new(Role.System, "You are a helpful assistant designed to output JSON."), new(Role.User, "Who won the world series in 2020?"), }; - var chatRequest = new ChatRequest(messages, Model.GPT4_Turbo, responseFormat: ChatResponseFormat.Json); + var chatRequest = new ChatRequest(messages, Model.GPT4o, responseFormat: ChatResponseFormat.Json); var response = await OpenAIClient.ChatEndpoint.GetCompletionAsync(chatRequest); Assert.IsNotNull(response); Assert.IsNotNull(response.Choices); @@ -111,9 +112,10 @@ public async Task Test_01_04_GetChatStreamingCompletionEnumerableAsync() }; var cumulativeDelta = string.Empty; var chatRequest = new ChatRequest(messages); - await foreach (var partialResponse in OpenAIClient.ChatEndpoint.StreamCompletionEnumerableAsync(chatRequest)) + await foreach (var partialResponse in 
OpenAIClient.ChatEndpoint.StreamCompletionEnumerableAsync(chatRequest, true)) { Assert.IsNotNull(partialResponse); + if (partialResponse.Usage != null) { return; } Assert.NotNull(partialResponse.Choices); Assert.NotZero(partialResponse.Choices.Count); @@ -214,9 +216,10 @@ public async Task Test_02_02_GetChatToolCompletion_Streaming() var response = await OpenAIClient.ChatEndpoint.StreamCompletionAsync(chatRequest, partialResponse => { Assert.IsNotNull(partialResponse); + if (partialResponse.Usage != null) { return; } Assert.NotNull(partialResponse.Choices); Assert.NotZero(partialResponse.Choices.Count); - }); + }, true); Assert.IsNotNull(response); Assert.IsNotNull(response.Choices); Assert.IsTrue(response.Choices.Count == 1); @@ -229,9 +232,10 @@ public async Task Test_02_02_GetChatToolCompletion_Streaming() response = await OpenAIClient.ChatEndpoint.StreamCompletionAsync(chatRequest, partialResponse => { Assert.IsNotNull(partialResponse); + if (partialResponse.Usage != null) { return; } Assert.NotNull(partialResponse.Choices); Assert.NotZero(partialResponse.Choices.Count); - }); + }, true); Assert.IsNotNull(response); Assert.IsNotNull(response.Choices); Assert.IsTrue(response.Choices.Count == 1); @@ -248,9 +252,10 @@ public async Task Test_02_02_GetChatToolCompletion_Streaming() response = await OpenAIClient.ChatEndpoint.StreamCompletionAsync(chatRequest, partialResponse => { Assert.IsNotNull(partialResponse); + if (partialResponse.Usage != null) { return; } Assert.NotNull(partialResponse.Choices); Assert.NotZero(partialResponse.Choices.Count); - }); + }, true); Assert.IsNotNull(response); Assert.IsNotNull(response.Choices); Assert.IsTrue(response.Choices.Count == 1); @@ -272,9 +277,10 @@ public async Task Test_02_02_GetChatToolCompletion_Streaming() response = await OpenAIClient.ChatEndpoint.StreamCompletionAsync(chatRequest, partialResponse => { Assert.IsNotNull(partialResponse); + if (partialResponse.Usage != null) { return; } 
Assert.NotNull(partialResponse.Choices); Assert.NotZero(partialResponse.Choices.Count); - }); + }, true); Assert.IsNotNull(response); } @@ -289,10 +295,11 @@ public async Task Test_02_03_ChatCompletion_Multiple_Tools_Streaming() }; var tools = Tool.GetAllAvailableTools(false, forceUpdate: true, clearCache: true); - var chatRequest = new ChatRequest(messages, model: Model.GPT4_Turbo, tools: tools, toolChoice: "auto"); + var chatRequest = new ChatRequest(messages, model: Model.GPT4o, tools: tools, toolChoice: "auto"); var response = await OpenAIClient.ChatEndpoint.StreamCompletionAsync(chatRequest, partialResponse => { Assert.IsNotNull(partialResponse); + if (partialResponse.Usage != null) { return; } Assert.NotNull(partialResponse.Choices); Assert.NotZero(partialResponse.Choices.Count); }); @@ -311,7 +318,7 @@ public async Task Test_02_03_ChatCompletion_Multiple_Tools_Streaming() messages.Add(new Message(toolCall, output)); } - chatRequest = new ChatRequest(messages, model: Model.GPT4_Turbo, tools: tools, toolChoice: "none"); + chatRequest = new ChatRequest(messages, model: Model.GPT4o, tools: tools, toolChoice: "none"); response = await OpenAIClient.ChatEndpoint.GetCompletionAsync(chatRequest); Assert.IsNotNull(response); @@ -383,7 +390,7 @@ public async Task Test_03_01_GetChatVision() new ImageUrl("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg", ImageDetail.Low) }) }; - var chatRequest = new ChatRequest(messages, model: Model.GPT4_Turbo); + var chatRequest = new ChatRequest(messages, model: Model.GPT4o); var response = await OpenAIClient.ChatEndpoint.GetCompletionAsync(chatRequest); Assert.IsNotNull(response); Assert.IsNotNull(response.Choices); @@ -404,10 +411,11 @@ public async Task Test_03_02_GetChatVisionStreaming() new 
ImageUrl("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg", ImageDetail.Low) }) }; - var chatRequest = new ChatRequest(messages, model: Model.GPT4_Turbo); + var chatRequest = new ChatRequest(messages, model: Model.GPT4o); var response = await OpenAIClient.ChatEndpoint.StreamCompletionAsync(chatRequest, partialResponse => { Assert.IsNotNull(partialResponse); + if (partialResponse.Usage != null) { return; } Assert.NotNull(partialResponse.Choices); Assert.NotZero(partialResponse.Choices.Count); }); @@ -443,7 +451,7 @@ public async Task Test_04_01_GetChatLogProbs() } [Test] - public async Task Test_04_02_GetChatLogProbsSteaming() + public async Task Test_04_02_GetChatLogProbsStreaming() { Assert.IsNotNull(OpenAIClient.ChatEndpoint); var messages = new List @@ -458,6 +466,7 @@ public async Task Test_04_02_GetChatLogProbsSteaming() var response = await OpenAIClient.ChatEndpoint.StreamCompletionAsync(chatRequest, partialResponse => { Assert.IsNotNull(partialResponse); + if (partialResponse.Usage != null) { return; } Assert.NotNull(partialResponse.Choices); Assert.NotZero(partialResponse.Choices.Count); @@ -465,7 +474,7 @@ public async Task Test_04_02_GetChatLogProbsSteaming() { cumulativeDelta += choice.Delta.Content; } - }); + }, true); Assert.IsNotNull(response); Assert.IsNotNull(response.Choices); var choice = response.FirstChoice; @@ -479,4 +488,4 @@ public async Task Test_04_02_GetChatLogProbsSteaming() response.GetUsage(); } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet-Tests/TestFixture_05_Images.cs b/OpenAI-DotNet-Tests/TestFixture_05_Images.cs index 31989e02..ecf18b05 100644 --- a/OpenAI-DotNet-Tests/TestFixture_05_Images.cs +++ b/OpenAI-DotNet-Tests/TestFixture_05_Images.cs @@ -33,7 +33,7 @@ public async Task Test_01_02_GenerateImages_B64_Json() { Assert.IsNotNull(OpenAIClient.ImagesEndPoint); - var request = new 
ImageGenerationRequest("A house riding a velociraptor", Model.DallE_2, responseFormat: ResponseFormat.B64_Json); + var request = new ImageGenerationRequest("A house riding a velociraptor", Model.DallE_2, responseFormat: ImageResponseFormat.B64_Json); var imageResults = await OpenAIClient.ImagesEndPoint.GenerateImageAsync(request); Assert.IsNotNull(imageResults); @@ -75,7 +75,7 @@ public async Task Test_02_02_CreateImageEdit_B64_Json() var imageAssetPath = Path.GetFullPath("../../../Assets/image_edit_original.png"); var maskAssetPath = Path.GetFullPath("../../../Assets/image_edit_mask.png"); - var request = new ImageEditRequest(imageAssetPath, maskAssetPath, "A sunlit indoor lounge area with a pool containing a flamingo", size: ImageSize.Small, responseFormat: ResponseFormat.B64_Json); + var request = new ImageEditRequest(imageAssetPath, maskAssetPath, "A sunlit indoor lounge area with a pool containing a flamingo", size: ImageSize.Small, responseFormat: ImageResponseFormat.B64_Json); var imageResults = await OpenAIClient.ImagesEndPoint.CreateImageEditAsync(request); Assert.IsNotNull(imageResults); @@ -113,7 +113,7 @@ public async Task Test_03_02_CreateImageVariation_B64_Json() Assert.IsNotNull(OpenAIClient.ImagesEndPoint); var imageAssetPath = Path.GetFullPath("../../../Assets/image_edit_original.png"); - var request = new ImageVariationRequest(imageAssetPath, size: ImageSize.Small, responseFormat: ResponseFormat.B64_Json); + var request = new ImageVariationRequest(imageAssetPath, size: ImageSize.Small, responseFormat: ImageResponseFormat.B64_Json); var imageResults = await OpenAIClient.ImagesEndPoint.CreateImageVariationAsync(request); Assert.IsNotNull(imageResults); diff --git a/OpenAI-DotNet-Tests/TestFixture_06_Embeddings.cs b/OpenAI-DotNet-Tests/TestFixture_06_Embeddings.cs index b6590572..2f92127f 100644 --- a/OpenAI-DotNet-Tests/TestFixture_06_Embeddings.cs +++ b/OpenAI-DotNet-Tests/TestFixture_06_Embeddings.cs @@ -1,15 +1,15 @@ // Licensed under the MIT 
License. See LICENSE in the project root for license information. using NUnit.Framework; -using System.Threading.Tasks; using OpenAI.Models; +using System.Threading.Tasks; namespace OpenAI.Tests { internal class TestFixture_06_Embeddings : AbstractTestFixture { [Test] - public async Task Test_1_CreateEmbedding() + public async Task Test_01_CreateEmbedding() { Assert.IsNotNull(OpenAIClient.EmbeddingsEndpoint); var embedding = await OpenAIClient.EmbeddingsEndpoint.CreateEmbeddingAsync("The food was delicious and the waiter..."); @@ -18,7 +18,7 @@ public async Task Test_1_CreateEmbedding() } [Test] - public async Task Test_2_CreateEmbeddingWithDimensions() + public async Task Test_02_CreateEmbeddingWithDimensions() { Assert.IsNotNull(OpenAIClient.EmbeddingsEndpoint); var embedding = await OpenAIClient.EmbeddingsEndpoint.CreateEmbeddingAsync("The food was delicious and the waiter...", @@ -29,7 +29,7 @@ public async Task Test_2_CreateEmbeddingWithDimensions() } [Test] - public async Task Test_3_CreateEmbeddingsWithMultipleInputs() + public async Task Test_03_CreateEmbeddingsWithMultipleInputs() { Assert.IsNotNull(OpenAIClient.EmbeddingsEndpoint); var embeddings = new[] diff --git a/OpenAI-DotNet-Tests/TestFixture_08_Files.cs b/OpenAI-DotNet-Tests/TestFixture_08_Files.cs index 63bcd64c..af402baf 100644 --- a/OpenAI-DotNet-Tests/TestFixture_08_Files.cs +++ b/OpenAI-DotNet-Tests/TestFixture_08_Files.cs @@ -2,6 +2,7 @@ using NUnit.Framework; using OpenAI.Chat; +using OpenAI.Files; using System; using System.Collections.Generic; using System.IO; @@ -18,7 +19,7 @@ public async Task Test_01_UploadFile() var testData = new Conversation(new List { new(Role.Assistant, "I'm a learning language model") }); await File.WriteAllTextAsync("test.jsonl", testData); Assert.IsTrue(File.Exists("test.jsonl")); - var result = await OpenAIClient.FilesEndpoint.UploadFileAsync("test.jsonl", "fine-tune"); + var result = await OpenAIClient.FilesEndpoint.UploadFileAsync("test.jsonl", 
FilePurpose.FineTune); Assert.IsNotNull(result); Assert.IsTrue(result.FileName == "test.jsonl"); Console.WriteLine($"{result.Id} -> {result.Object}"); diff --git a/OpenAI-DotNet-Tests/TestFixture_11_VectorStores.cs b/OpenAI-DotNet-Tests/TestFixture_11_VectorStores.cs new file mode 100644 index 00000000..874a6a7a --- /dev/null +++ b/OpenAI-DotNet-Tests/TestFixture_11_VectorStores.cs @@ -0,0 +1,227 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using NuGet.Frameworks; +using NUnit.Framework; +using OpenAI.Files; +using OpenAI.VectorStores; +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading.Tasks; + +namespace OpenAI.Tests +{ + internal class TestFixture_11_VectorStores : AbstractTestFixture + { + [Test] + public async Task Test_01_VectorStores_SingleFile() + { + Assert.IsNotNull(OpenAIClient.VectorStoresEndpoint); + + const string testFilePath = "vector_file_test_1.txt"; + await File.WriteAllTextAsync(testFilePath, "Knowledge is power!"); + Assert.IsTrue(File.Exists(testFilePath)); + FileResponse file = null; + + try + { + try + { + file = await OpenAIClient.FilesEndpoint.UploadFileAsync(testFilePath, FilePurpose.Assistants); + } + finally + { + if (File.Exists(testFilePath)) + { + File.Delete(testFilePath); + } + + Assert.IsFalse(File.Exists(testFilePath)); + } + + VectorStoreResponse vectorStore = null; + + try + { + // create vector store + var createVectorStoreRequest = new CreateVectorStoreRequest("test-vector-store"); + vectorStore = await OpenAIClient.VectorStoresEndpoint.CreateVectorStoreAsync(createVectorStoreRequest); + Assert.IsNotNull(vectorStore); + Assert.AreEqual("test-vector-store", vectorStore.Name); + + // list vector stores + var vectorStores = await OpenAIClient.VectorStoresEndpoint.ListVectorStoresAsync(); + Assert.IsNotNull(vectorStores); + Assert.IsNotEmpty(vectorStores.Items); + + // modify 
vector store + IReadOnlyDictionary metadata = new Dictionary { { nameof(Test_01_VectorStores_SingleFile), DateTime.UtcNow } }; + var modifiedVectorStore = await OpenAIClient.VectorStoresEndpoint.ModifyVectorStoreAsync(vectorStore, metadata: metadata); + Assert.IsNotNull(modifiedVectorStore); + Assert.AreEqual(vectorStore.Id, modifiedVectorStore.Id); + + // retrieve vector store + var retrievedVectorStore = await OpenAIClient.VectorStoresEndpoint.GetVectorStoreAsync(vectorStore); + Assert.IsNotNull(retrievedVectorStore); + Assert.AreEqual(vectorStore.Id, retrievedVectorStore.Id); + + VectorStoreFileResponse vectorStoreFile = null; + + try + { + // create vector store file + vectorStoreFile = await OpenAIClient.VectorStoresEndpoint.CreateVectorStoreFileAsync(vectorStore, file, new ChunkingStrategy(ChunkingStrategyType.Static)); + Assert.IsNotNull(vectorStoreFile); + + // list vector store files + var vectorStoreFiles = await OpenAIClient.VectorStoresEndpoint.ListVectorStoreFilesAsync(vectorStore); + Assert.IsNotNull(vectorStoreFiles); + Assert.IsNotEmpty(vectorStoreFiles.Items); + + // retrieve vector store file + var retrievedVectorStoreFile = await OpenAIClient.VectorStoresEndpoint.GetVectorStoreFileAsync(vectorStore, vectorStoreFile); + Assert.IsNotNull(retrievedVectorStoreFile); + Assert.AreEqual(vectorStoreFile.Id, retrievedVectorStoreFile.Id); + } + finally + { + if (vectorStoreFile != null) + { + // delete vector store file + var deletedVectorStoreFile = await OpenAIClient.VectorStoresEndpoint.DeleteVectorStoreFileAsync(vectorStore, vectorStoreFile); + Assert.IsNotNull(deletedVectorStoreFile); + Assert.IsTrue(deletedVectorStoreFile); + } + } + } + finally + { + if (vectorStore != null) + { + // delete vector store + var deletedVectorStore = await OpenAIClient.VectorStoresEndpoint.DeleteVectorStoreAsync(vectorStore); + Assert.IsNotNull(deletedVectorStore); + Assert.IsTrue(deletedVectorStore); + } + } + } + finally + { + if (file != null) + { + var isDeleted = 
await OpenAIClient.FilesEndpoint.DeleteFileAsync(file); + Assert.IsTrue(isDeleted); + } + } + } + + [Test] + public async Task Test_02_VectorStores_BatchFiles() + { + Assert.IsNotNull(OpenAIClient.VectorStoresEndpoint); + + const string testFilePath1 = "vector_file_test_2_1.txt"; + const string testFilePath2 = "vector_file_test_2_2.txt"; + await File.WriteAllTextAsync(testFilePath1, "Knowledge is power!"); + await File.WriteAllTextAsync(testFilePath2, "Knowledge is power!"); + Assert.IsTrue(File.Exists(testFilePath1)); + Assert.IsTrue(File.Exists(testFilePath2)); + ConcurrentBag files = new(); + + try + { + try + { + var uploadTasks = new List + { + Task.Run(async () => files.Add(await OpenAIClient.FilesEndpoint.UploadFileAsync(testFilePath1, FilePurpose.Assistants))), + Task.Run(async () => files.Add(await OpenAIClient.FilesEndpoint.UploadFileAsync(testFilePath2, FilePurpose.Assistants))) + }; + + await Task.WhenAll(uploadTasks).ConfigureAwait(false); + } + finally + { + if (File.Exists(testFilePath1)) + { + File.Delete(testFilePath1); + } + + if (File.Exists(testFilePath2)) + { + File.Delete(testFilePath2); + } + + Assert.IsFalse(File.Exists(testFilePath1)); + Assert.IsFalse(File.Exists(testFilePath2)); + } + + VectorStoreResponse vectorStore = null; + + try + { + var createVectorStoreRequest = new CreateVectorStoreRequest(name: "test-vector-store-batch", files.ToList()); + vectorStore = await OpenAIClient.VectorStoresEndpoint.CreateVectorStoreAsync(createVectorStoreRequest); + Assert.IsNotNull(vectorStore); + Assert.AreEqual("test-vector-store-batch", vectorStore.Name); + + // create vector store batch + var vectorStoreFileBatch = await OpenAIClient.VectorStoresEndpoint.CreateVectorStoreFileBatchAsync(vectorStore, files.ToList()); + Assert.IsNotNull(vectorStoreFileBatch); + + // cancel vector store batch + var cancelledVectorStoreFileBatch = await OpenAIClient.VectorStoresEndpoint.CancelVectorStoreFileBatchAsync(vectorStore, vectorStoreFileBatch); + 
Assert.IsNotNull(cancelledVectorStoreFileBatch); + Assert.IsTrue(cancelledVectorStoreFileBatch); + + // create vector store batch + vectorStoreFileBatch = await OpenAIClient.VectorStoresEndpoint.CreateVectorStoreFileBatchAsync(vectorStore, files.ToList()); + Assert.IsNotNull(vectorStoreFileBatch); + + // currently no way to list vector store batches + //var vectorStoreFileBatches = await OpenAIClient.VectorStoresEndpoint.ListVectorStoreFileBatchesAsync(vectorStore); + //Assert.IsNotNull(vectorStoreFileBatches); + //Assert.IsNotEmpty(vectorStoreFileBatches.Items); + + // retrieve vector store batch + var retrievedVectorStoreFileBatch = await vectorStoreFileBatch.WaitForStatusChangeAsync(); + Assert.IsNotNull(retrievedVectorStoreFileBatch); + Assert.IsTrue(retrievedVectorStoreFileBatch.Status == VectorStoreFileStatus.Completed); + + // list vector store batch files + var vectorStoreBatchFiles = await OpenAIClient.VectorStoresEndpoint.ListVectorStoreBatchFilesAsync(vectorStore, vectorStoreFileBatch); + Assert.IsNotNull(vectorStoreBatchFiles); + Assert.IsNotEmpty(vectorStoreBatchFiles.Items); + + foreach (var file in vectorStoreBatchFiles.Items) + { + // get vector store batch file + var retrievedVectorStoreBatchFile = await OpenAIClient.VectorStoresEndpoint.GetVectorStoreFileAsync(vectorStore, file); + Assert.IsNotNull(retrievedVectorStoreBatchFile); + Assert.AreEqual(file.Id, retrievedVectorStoreBatchFile.Id); + } + } + finally + { + if (vectorStore != null) + { + var deletedVectorStore = await OpenAIClient.VectorStoresEndpoint.DeleteVectorStoreAsync(vectorStore); + Assert.IsNotNull(deletedVectorStore); + Assert.IsTrue(deletedVectorStore); + } + } + } + finally + { + if (!files.IsEmpty) + { + var deleteTasks = files.Select(file => OpenAIClient.FilesEndpoint.DeleteFileAsync(file)).ToList(); + await Task.WhenAll(deleteTasks).ConfigureAwait(false); + Assert.IsTrue(deleteTasks.TrueForAll(task => task.Result)); + } + } + } + } +} diff --git 
a/OpenAI-DotNet-Tests/TestFixture_12_Batches.cs b/OpenAI-DotNet-Tests/TestFixture_12_Batches.cs new file mode 100644 index 00000000..94dbd1de --- /dev/null +++ b/OpenAI-DotNet-Tests/TestFixture_12_Batches.cs @@ -0,0 +1,77 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using NUnit.Framework; +using OpenAI.Batch; +using OpenAI.Files; +using System.IO; +using System.Threading.Tasks; + +namespace OpenAI.Tests +{ + internal class TestFixture_12_Batches : AbstractTestFixture + { + [Test] + public async Task Test_01_Batches() + { + Assert.IsNotNull(OpenAIClient.BatchEndpoint); + + const string testFilePath = "batch.txt"; + await File.WriteAllTextAsync(testFilePath, "{\"custom_id\": \"request-1\", \"method\": \"POST\", \"url\": \"/v1/chat/completions\", \"body\": {\"model\": \"gpt-3.5-turbo\", \"messages\": [{\"role\": \"system\", \"content\": \"You are a helpful assistant.\"}, {\"role\": \"user\", \"content\": \"What is 2+2?\"}]}}\r\n"); + Assert.IsTrue(File.Exists(testFilePath)); + FileResponse file = null; + + try + { + try + { + file = await OpenAIClient.FilesEndpoint.UploadFileAsync(testFilePath, FilePurpose.Batch); + } + finally + { + if (File.Exists(testFilePath)) + { + File.Delete(testFilePath); + } + + Assert.IsFalse(File.Exists(testFilePath)); + } + + BatchResponse batch = null; + + try + { + // create batch + var batchRequest = new CreateBatchRequest(file, Endpoint.ChatCompletions); + batch = await OpenAIClient.BatchEndpoint.CreateBatchAsync(batchRequest); + Assert.NotNull(batch); + + // list batches + var listResponse = await OpenAIClient.BatchEndpoint.ListBatchesAsync(); + Assert.NotNull(listResponse); + Assert.NotNull(listResponse.Items); + + // retrieve batch + var retrievedBatch = await OpenAIClient.BatchEndpoint.RetrieveBatchAsync(batch); + Assert.NotNull(retrievedBatch); + } + finally + { + // cancel batch + if (batch != null) + { + var isCancelled = await 
OpenAIClient.BatchEndpoint.CancelBatchAsync(batch); + Assert.IsTrue(isCancelled); + } + } + } + finally + { + if (file != null) + { + var isDeleted = await OpenAIClient.FilesEndpoint.DeleteFileAsync(file); + Assert.IsTrue(isDeleted); + } + } + } + } +} diff --git a/OpenAI-DotNet-Tests/TestServices/WeatherService.cs b/OpenAI-DotNet-Tests/TestServices/WeatherService.cs index 3da18cd1..6499c4f6 100644 --- a/OpenAI-DotNet-Tests/TestServices/WeatherService.cs +++ b/OpenAI-DotNet-Tests/TestServices/WeatherService.cs @@ -18,7 +18,7 @@ public static async Task GetCurrentWeatherAsync( [FunctionParameter("The location the user is currently in.")] string location, [FunctionParameter("The units the user has requested temperature in. Typically this is based on the users location.")] WeatherUnit unit) { - var temp = new Random().Next(-10, 40); + var temp = new Random().Next(10, 40); temp = unit switch { @@ -31,4 +31,4 @@ public static async Task GetCurrentWeatherAsync( public static int CelsiusToFahrenheit(int celsius) => (celsius * 9 / 5) + 32; } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Assistants/AssistantExtensions.cs b/OpenAI-DotNet/Assistants/AssistantExtensions.cs index 32ec7796..859e04bc 100644 --- a/OpenAI-DotNet/Assistants/AssistantExtensions.cs +++ b/OpenAI-DotNet/Assistants/AssistantExtensions.cs @@ -21,31 +21,151 @@ public static class AssistantExtensions /// Optional, . /// . 
public static async Task ModifyAsync(this AssistantResponse assistant, CreateAssistantRequest request, CancellationToken cancellationToken = default) - { - request = new CreateAssistantRequest(assistant: assistant, model: request.Model, name: request.Name, description: request.Description, instructions: request.Instructions, tools: request.Tools, files: request.FileIds, metadata: request.Metadata); - return await assistant.Client.AssistantsEndpoint.ModifyAssistantAsync(assistantId: assistant.Id, request: request, cancellationToken: cancellationToken).ConfigureAwait(continueOnCapturedContext: false); - } + => await assistant.Client.AssistantsEndpoint.ModifyAssistantAsync( + assistantId: assistant.Id, + request: new CreateAssistantRequest( + assistant: assistant, + model: request.Model, + name: request.Name, + description: request.Description, + instructions: request.Instructions, + toolResources: request.ToolResources, + tools: request.Tools, + metadata: request.Metadata, + temperature: request.Temperature, + topP: request.TopP, + responseFormat: request.ResponseFormat), + cancellationToken: cancellationToken).ConfigureAwait(false); /// /// Delete the assistant. /// /// . + /// Optional, should tool resources, such as vector stores be deleted when this assistant is deleted? /// Optional, . /// True, if the was successfully deleted. 
- public static async Task DeleteAsync(this AssistantResponse assistant, CancellationToken cancellationToken = default) - => await assistant.Client.AssistantsEndpoint.DeleteAssistantAsync(assistant.Id, cancellationToken).ConfigureAwait(false); + public static async Task DeleteAsync(this AssistantResponse assistant, bool deleteToolResources = false, CancellationToken cancellationToken = default) + { + var deleteTasks = new List> { assistant.Client.AssistantsEndpoint.DeleteAssistantAsync(assistant.Id, cancellationToken) }; + + if (deleteToolResources && assistant.ToolResources?.FileSearch?.VectorStoreIds is { Count: > 0 }) + { + deleteTasks.AddRange( + from vectorStoreId in assistant.ToolResources?.FileSearch?.VectorStoreIds + where !string.IsNullOrWhiteSpace(vectorStoreId) + select assistant.Client.VectorStoresEndpoint.DeleteVectorStoreAsync(vectorStoreId, cancellationToken)); + } + + await Task.WhenAll(deleteTasks).ConfigureAwait(false); + return deleteTasks.TrueForAll(task => task.Result); + } /// /// Create a thread and run it. /// /// . /// Optional, . + /// Optional, stream callback handler. /// Optional, . /// . - public static async Task CreateThreadAndRunAsync(this AssistantResponse assistant, CreateThreadRequest request = null, CancellationToken cancellationToken = default) - => await assistant.Client.ThreadsEndpoint.CreateThreadAndRunAsync(new CreateThreadAndRunRequest(assistant.Id, createThreadRequest: request), cancellationToken).ConfigureAwait(false); + public static async Task CreateThreadAndRunAsync(this AssistantResponse assistant, CreateThreadRequest request = null, Action streamEventHandler = null, CancellationToken cancellationToken = default) + => await assistant.Client.ThreadsEndpoint.CreateThreadAndRunAsync(new CreateThreadAndRunRequest(assistant.Id, createThreadRequest: request), streamEventHandler, cancellationToken).ConfigureAwait(false); + + #region Tools + + /// + /// Invoke the assistant's tool function using the . + /// + /// . + /// . 
+ /// Tool output result as . + public static string InvokeToolCall(this AssistantResponse assistant, ToolCall toolCall) + { + if (toolCall.Type != "function") + { + throw new InvalidOperationException($"Cannot invoke built in tool {toolCall.Type}"); + } + + var tool = assistant.Tools.FirstOrDefault(tool => tool.Type == "function" && tool.Function.Name == toolCall.FunctionCall.Name) ?? + throw new InvalidOperationException($"Failed to find a valid tool for [{toolCall.Id}] {toolCall.Type}"); + tool.Function.Arguments = toolCall.FunctionCall.Arguments; + return tool.InvokeFunction(); + } - #region Files + /// + /// Invoke the assistant's tool function using the . + /// + /// . + /// . + /// Optional, . + /// Tool output result as . + public static async Task InvokeToolCallAsync(this AssistantResponse assistant, ToolCall toolCall, CancellationToken cancellationToken = default) + { + if (toolCall.Type != "function") + { + throw new InvalidOperationException($"Cannot invoke built in tool {toolCall.Type}"); + } + + var tool = assistant.Tools.FirstOrDefault(tool => tool.Type == "function" && tool.Function.Name == toolCall.FunctionCall.Name) ?? + throw new InvalidOperationException($"Failed to find a valid tool for [{toolCall.Id}] {toolCall.Type}"); + tool.Function.Arguments = toolCall.FunctionCall.Arguments; + return await tool.InvokeFunctionAsync(cancellationToken).ConfigureAwait(false); + } + + /// + /// Calls the tool's function, with the provided arguments from the toolCall and returns the output. + /// + /// . + /// . + /// . + public static ToolOutput GetToolOutput(this AssistantResponse assistant, ToolCall toolCall) + => new(toolCall.Id, assistant.InvokeToolCall(toolCall)); + + /// + /// Calls each tool's function, with the provided arguments from the toolCalls and returns the outputs. + /// + /// . + /// A collection of s. + /// A collection of s. 
+ public static IReadOnlyList GetToolOutputs(this AssistantResponse assistant, IEnumerable toolCalls) + => toolCalls.Select(assistant.GetToolOutput).ToList(); + + /// + /// Calls the tool's function, with the provided arguments from the toolCall and returns the output. + /// + /// . + /// . + /// Optional, . + /// . + public static async Task GetToolOutputAsync(this AssistantResponse assistant, ToolCall toolCall, CancellationToken cancellationToken = default) + { + var output = await assistant.InvokeToolCallAsync(toolCall, cancellationToken).ConfigureAwait(false); + return new ToolOutput(toolCall.Id, output); + } + + /// + /// Calls each tool's function, with the provided arguments from the toolCalls and returns the outputs. + /// + /// . + /// A collection of s. + /// Optional, . + /// A collection of s. + public static async Task> GetToolOutputsAsync(this AssistantResponse assistant, IEnumerable toolCalls, CancellationToken cancellationToken = default) + => await Task.WhenAll(toolCalls.Select(async toolCall => await assistant.GetToolOutputAsync(toolCall, cancellationToken).ConfigureAwait(false))).ConfigureAwait(false); + + /// + /// Calls each tool's function, with the provided arguments from the toolCalls and returns the outputs. + /// + /// . + /// The to complete the tool calls for. + /// Optional, . + /// A collection of s. + public static async Task> GetToolOutputsAsync(this AssistantResponse assistant, RunResponse run, CancellationToken cancellationToken = default) + => await GetToolOutputsAsync(assistant, run.RequiredAction.SubmitToolOutputs.ToolCalls, cancellationToken).ConfigureAwait(false); + + #endregion Tools + + #region Files (Obsolete) /// /// Returns a list of assistant files. @@ -54,6 +174,7 @@ public static async Task CreateThreadAndRunAsync(this AssistantResp /// . /// Optional, . /// . + [Obsolete("Files removed from Assistants. 
Files now belong to ToolResources.")] public static async Task> ListFilesAsync(this AssistantResponse assistant, ListQuery query = null, CancellationToken cancellationToken = default) => await assistant.Client.AssistantsEndpoint.ListFilesAsync(assistant.Id, query, cancellationToken).ConfigureAwait(false); @@ -67,6 +188,7 @@ public static async Task> ListFilesAsync(thi /// /// Optional, . /// . + [Obsolete("Files removed from Assistants. Files now belong to ToolResources.")] public static async Task AttachFileAsync(this AssistantResponse assistant, FileResponse file, CancellationToken cancellationToken = default) => await assistant.Client.AssistantsEndpoint.AttachFileAsync(assistant.Id, file, cancellationToken).ConfigureAwait(false); @@ -77,9 +199,10 @@ public static async Task AttachFileAsync(this AssistantRe /// The local file path to upload. /// Optional, . /// . + [Obsolete("Files removed from Assistants. Files now belong to ToolResources.")] public static async Task UploadFileAsync(this AssistantResponse assistant, string filePath, CancellationToken cancellationToken = default) { - var file = await assistant.Client.FilesEndpoint.UploadFileAsync(new FileUploadRequest(filePath, "assistants"), cancellationToken).ConfigureAwait(false); + var file = await assistant.Client.FilesEndpoint.UploadFileAsync(new FileUploadRequest(filePath, FilePurpose.Assistants), cancellationToken).ConfigureAwait(false); return await assistant.AttachFileAsync(file, cancellationToken).ConfigureAwait(false); } @@ -91,10 +214,10 @@ public static async Task UploadFileAsync(this AssistantRe /// The name of the file. /// Optional, . /// . - + [Obsolete("Files removed from Assistants. 
Files now belong to ToolResources.")] public static async Task UploadFileAsync(this AssistantResponse assistant, Stream stream, string fileName, CancellationToken cancellationToken = default) { - var file = await assistant.Client.FilesEndpoint.UploadFileAsync(new FileUploadRequest(stream, fileName, "assistants"), cancellationToken).ConfigureAwait(false); + var file = await assistant.Client.FilesEndpoint.UploadFileAsync(new FileUploadRequest(stream, fileName, FilePurpose.Assistants), cancellationToken).ConfigureAwait(false); return await assistant.AttachFileAsync(file, cancellationToken).ConfigureAwait(false); } @@ -105,21 +228,10 @@ public static async Task UploadFileAsync(this AssistantRe /// The ID of the file we're getting. /// Optional, . /// . + [Obsolete("Files removed from Assistants. Files now belong to ToolResources.")] public static async Task RetrieveFileAsync(this AssistantResponse assistant, string fileId, CancellationToken cancellationToken = default) => await assistant.Client.AssistantsEndpoint.RetrieveFileAsync(assistant.Id, fileId, cancellationToken).ConfigureAwait(false); - // TODO 400 bad request errors. Likely OpenAI bug downloading assistant file content. - ///// - ///// Downloads the to the specified . - ///// - ///// . - ///// The directory to download the file into. - ///// Optional, delete the cached file. Defaults to false. - ///// Optional, . - ///// The full path of the downloaded file. - //public static async Task DownloadFileAsync(this AssistantFileResponse assistantFile, string directory, bool deleteCachedFile = false, CancellationToken cancellationToken = default) - // => await assistantFile.Client.FilesEndpoint.DownloadFileAsync(assistantFile.Id, directory, deleteCachedFile, cancellationToken).ConfigureAwait(false); - /// /// Remove the file from the assistant it is attached to. /// @@ -131,6 +243,7 @@ public static async Task RetrieveFileAsync(this Assistant /// . /// Optional, . /// True, if file was removed. 
+ [Obsolete("Files removed from Assistants. Files now belong to ToolResources.")] public static async Task RemoveFileAsync(this AssistantFileResponse file, CancellationToken cancellationToken = default) => await file.Client.AssistantsEndpoint.RemoveFileAsync(file.AssistantId, file.Id, cancellationToken).ConfigureAwait(false); @@ -146,6 +259,7 @@ public static async Task RemoveFileAsync(this AssistantFileResponse file, /// The ID of the file to remove. /// Optional, . /// True, if file was removed. + [Obsolete("Files removed from Assistants. Files now belong to ToolResources.")] public static async Task RemoveFileAsync(this AssistantResponse assistant, string fileId, CancellationToken cancellationToken = default) => await assistant.Client.AssistantsEndpoint.RemoveFileAsync(assistant.Id, fileId, cancellationToken).ConfigureAwait(false); @@ -155,6 +269,7 @@ public static async Task RemoveFileAsync(this AssistantResponse assistant, /// . /// Optional, . /// True, if the file was successfully removed from the assistant and deleted. + [Obsolete("Files removed from Assistants. Files now belong to ToolResources.")] public static async Task DeleteFileAsync(this AssistantFileResponse file, CancellationToken cancellationToken = default) { var isRemoved = await file.RemoveFileAsync(cancellationToken).ConfigureAwait(false); @@ -168,6 +283,7 @@ public static async Task DeleteFileAsync(this AssistantFileResponse file, /// The ID of the file to delete. /// Optional, . /// True, if the file was successfully removed from the assistant and deleted. + [Obsolete("Files removed from Assistants. 
Files now belong to ToolResources.")] public static async Task DeleteFileAsync(this AssistantResponse assistant, string fileId, CancellationToken cancellationToken = default) { var isRemoved = await assistant.Client.AssistantsEndpoint.RemoveFileAsync(assistant.Id, fileId, cancellationToken).ConfigureAwait(false); @@ -175,90 +291,6 @@ public static async Task DeleteFileAsync(this AssistantResponse assistant, return await assistant.Client.FilesEndpoint.DeleteFileAsync(fileId, cancellationToken).ConfigureAwait(false); } - #endregion Files - - #region Tools - - /// - /// Invoke the assistant's tool function using the . - /// - /// . - /// . - /// Tool output result as - public static string InvokeToolCall(this AssistantResponse assistant, ToolCall toolCall) - { - if (toolCall.Type != "function") - { - throw new InvalidOperationException($"Cannot invoke built in tool {toolCall.Type}"); - } - - var tool = assistant.Tools.FirstOrDefault(tool => tool.Type == "function" && tool.Function.Name == toolCall.FunctionCall.Name) ?? - throw new InvalidOperationException($"Failed to find a valid tool for [{toolCall.Id}] {toolCall.Type}"); - tool.Function.Arguments = toolCall.FunctionCall.Arguments; - return tool.InvokeFunction(); - } - - /// - /// Invoke the assistant's tool function using the . - /// - /// . - /// . - /// Optional, . - /// Tool output result as - public static async Task InvokeToolCallAsync(this AssistantResponse assistant, ToolCall toolCall, CancellationToken cancellationToken = default) - { - if (toolCall.Type != "function") - { - throw new InvalidOperationException($"Cannot invoke built in tool {toolCall.Type}"); - } - - var tool = assistant.Tools.FirstOrDefault(tool => tool.Type == "function" && tool.Function.Name == toolCall.FunctionCall.Name) ?? 
- throw new InvalidOperationException($"Failed to find a valid tool for [{toolCall.Id}] {toolCall.Type}"); - tool.Function.Arguments = toolCall.FunctionCall.Arguments; - return await tool.InvokeFunctionAsync(cancellationToken).ConfigureAwait(false); - } - - /// - /// Calls the tool's function, with the provided arguments from the toolCall and returns the output. - /// - /// . - /// . - /// . - public static ToolOutput GetToolOutput(this AssistantResponse assistant, ToolCall toolCall) - => new(toolCall.Id, assistant.InvokeToolCall(toolCall)); - - /// - /// Calls each tool's function, with the provided arguments from the toolCalls and returns the outputs. - /// - /// . - /// A collection of s. - /// A collection of s. - public static IReadOnlyList GetToolOutputs(this AssistantResponse assistant, IEnumerable toolCalls) - => toolCalls.Select(assistant.GetToolOutput).ToList(); - - /// - /// Calls the tool's function, with the provided arguments from the toolCall and returns the output. - /// - /// . - /// . - /// Optional, . - /// . - public static async Task GetToolOutputAsync(this AssistantResponse assistant, ToolCall toolCall, CancellationToken cancellationToken = default) - { - var output = await assistant.InvokeToolCallAsync(toolCall, cancellationToken).ConfigureAwait(false); - return new ToolOutput(toolCall.Id, output); - } - - /// - /// Calls each tool's function, with the provided arguments from the toolCalls and returns the outputs. - /// - /// . - /// A collection of s. - /// Optional, . - /// A collection of s. 
- public static async Task> GetToolOutputsAsync(this AssistantResponse assistant, IEnumerable toolCalls, CancellationToken cancellationToken = default) - => await Task.WhenAll(toolCalls.Select(async toolCall => await assistant.GetToolOutputAsync(toolCall, cancellationToken).ConfigureAwait(false))).ConfigureAwait(false); - - #endregion Tools + #endregion Files (Obsolete) } } diff --git a/OpenAI-DotNet/Assistants/AssistantFileResponse.cs b/OpenAI-DotNet/Assistants/AssistantFileResponse.cs index b2e8fc97..b1f3a153 100644 --- a/OpenAI-DotNet/Assistants/AssistantFileResponse.cs +++ b/OpenAI-DotNet/Assistants/AssistantFileResponse.cs @@ -8,6 +8,7 @@ namespace OpenAI.Assistants /// /// File attached to an assistant. /// + [Obsolete("Removed. Use Assistant.ToolResources instead.")] public sealed class AssistantFileResponse : BaseResponse { /// @@ -45,4 +46,4 @@ public sealed class AssistantFileResponse : BaseResponse public override string ToString() => Id; } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Assistants/AssistantResponse.cs b/OpenAI-DotNet/Assistants/AssistantResponse.cs index f33efd8e..cefaa2e2 100644 --- a/OpenAI-DotNet/Assistants/AssistantResponse.cs +++ b/OpenAI-DotNet/Assistants/AssistantResponse.cs @@ -1,5 +1,6 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. +using OpenAI.Extensions; using System; using System.Collections.Generic; using System.Text.Json.Serialization; @@ -77,14 +78,24 @@ public sealed class AssistantResponse : BaseResponse [JsonPropertyName("tools")] public IReadOnlyList Tools { get; private set; } + /// + /// A set of resources that are used by the assistant's tools. + /// The resources are specific to the type of tool. + /// For example, the code_interpreter tool requires a list of file IDs, + /// while the file_search tool requires a list of vector store IDs. 
+ /// + [JsonInclude] + [JsonPropertyName("tool_resources")] + public ToolResources ToolResources { get; private set; } + /// /// A list of file IDs attached to this assistant. /// There can be a maximum of 20 files attached to the assistant. /// Files are ordered by their creation date in ascending order. /// - [JsonInclude] - [JsonPropertyName("file_ids")] - public IReadOnlyList FileIds { get; private set; } + [JsonIgnore] + [Obsolete("Files removed from Assistants. Files now belong to ToolResources.")] + public IReadOnlyList FileIds => null; /// /// Set of 16 key-value pairs that can be attached to an object. @@ -95,8 +106,43 @@ public sealed class AssistantResponse : BaseResponse [JsonPropertyName("metadata")] public IReadOnlyDictionary Metadata { get; private set; } + /// + /// What sampling temperature to use, between 0 and 2. + /// Higher values like 0.8 will make the output more random, + /// while lower values like 0.2 will make it more focused and deterministic. + /// + [JsonInclude] + [JsonPropertyName("temperature")] + public double Temperature { get; private set; } + + /// + /// An alternative to sampling with temperature, called nucleus sampling, + /// where the model considers the results of the tokens with top_p probability mass. + /// So 0.1 means only the tokens comprising the top 10% probability mass are considered. + /// + [JsonInclude] + [JsonPropertyName("top_p")] + public double TopP { get; private set; } + + /// + /// Specifies the format that the model must output. + /// Setting to enables JSON mode, + /// which guarantees the message the model generates is valid JSON. + /// + /// + /// Important: When using JSON mode you must still instruct the model to produce JSON yourself via some conversation message, + /// for example via your system message. 
If you don't do this, the model may generate an unending stream of + /// whitespace until the generation reaches the token limit, which may take a lot of time and give the appearance + /// of a "stuck" request. Also note that the message content may be partial (i.e. cut off) if finish_reason="length", + /// which indicates the generation exceeded max_tokens or the conversation exceeded the max context length. + /// + [JsonInclude] + [JsonPropertyName("response_format")] + [JsonConverter(typeof(ResponseFormatConverter))] + public ChatResponseFormat ResponseFormat { get; private set; } + public static implicit operator string(AssistantResponse assistant) => assistant?.Id; public override string ToString() => Id; } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Assistants/AssistantsEndpoint.cs b/OpenAI-DotNet/Assistants/AssistantsEndpoint.cs index b51a78b6..45316fa5 100644 --- a/OpenAI-DotNet/Assistants/AssistantsEndpoint.cs +++ b/OpenAI-DotNet/Assistants/AssistantsEndpoint.cs @@ -20,11 +20,11 @@ internal AssistantsEndpoint(OpenAIClient client) : base(client) { } /// /// . /// Optional, . - /// + /// . 
public async Task> ListAssistantsAsync(ListQuery query = null, CancellationToken cancellationToken = default) { using var response = await client.Client.GetAsync(GetUrl(queryParameters: query), cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken: cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); return response.Deserialize>(responseAsString, client); } @@ -37,9 +37,9 @@ public async Task> ListAssistantsAsync(ListQuery public async Task CreateAssistantAsync(CreateAssistantRequest request = null, CancellationToken cancellationToken = default) { request ??= new CreateAssistantRequest(); - using var jsonContent = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); - using var response = await client.Client.PostAsync(GetUrl(), jsonContent, cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, jsonContent, null, cancellationToken).ConfigureAwait(false); + using var payload = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await client.Client.PostAsync(GetUrl(), payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); return response.Deserialize(responseAsString, client); } @@ -52,7 +52,7 @@ public async Task CreateAssistantAsync(CreateAssistantRequest public async Task RetrieveAssistantAsync(string assistantId, CancellationToken cancellationToken = default) { using var response = await client.Client.GetAsync(GetUrl($"/{assistantId}"), cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken: cancellationToken).ConfigureAwait(false); 
+ var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); return response.Deserialize(responseAsString, client); } @@ -65,9 +65,9 @@ public async Task RetrieveAssistantAsync(string assistantId, /// . public async Task ModifyAssistantAsync(string assistantId, CreateAssistantRequest request, CancellationToken cancellationToken = default) { - using var jsonContent = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); - using var response = await client.Client.PostAsync(GetUrl($"/{assistantId}"), jsonContent, cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, jsonContent, null, cancellationToken).ConfigureAwait(false); + using var payload = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await client.Client.PostAsync(GetUrl($"/{assistantId}"), payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); return response.Deserialize(responseAsString, client); } @@ -80,11 +80,11 @@ public async Task ModifyAssistantAsync(string assistantId, Cr public async Task DeleteAssistantAsync(string assistantId, CancellationToken cancellationToken = default) { using var response = await client.Client.DeleteAsync(GetUrl($"/{assistantId}"), cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken: cancellationToken).ConfigureAwait(false); - return JsonSerializer.Deserialize(responseAsString, OpenAIClient.JsonSerializationOptions)?.Deleted ?? false; + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client)?.Deleted ?? 
false; } - #region Files + #region Files (Obsolete) /// /// Returns a list of assistant files. @@ -93,10 +93,11 @@ public async Task DeleteAssistantAsync(string assistantId, CancellationTok /// . /// Optional, . /// . + [Obsolete("Files removed from Assistants. Files now belong to ToolResources.")] public async Task> ListFilesAsync(string assistantId, ListQuery query = null, CancellationToken cancellationToken = default) { using var response = await client.Client.GetAsync(GetUrl($"/{assistantId}/files", query), cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken: cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); return response.Deserialize>(responseAsString, client); } @@ -110,16 +111,17 @@ public async Task> ListFilesAsync(string ass /// /// Optional, . /// . + [Obsolete("Files removed from Assistants. Files now belong to ToolResources.")] public async Task AttachFileAsync(string assistantId, FileResponse file, CancellationToken cancellationToken = default) { - if (file?.Purpose?.Equals("assistants") != true) + if (file?.Purpose?.Equals(FilePurpose.Assistants) != true) { throw new InvalidOperationException($"{nameof(file)}.{nameof(file.Purpose)} must be 'assistants'!"); } - using var jsonContent = JsonSerializer.Serialize(new { file_id = file.Id }, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); - using var response = await client.Client.PostAsync(GetUrl($"/{assistantId}/files"), jsonContent, cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, jsonContent, null, cancellationToken).ConfigureAwait(false); + using var payload = JsonSerializer.Serialize(new { file_id = file.Id }, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await 
client.Client.PostAsync(GetUrl($"/{assistantId}/files"), payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); return response.Deserialize(responseAsString, client); } @@ -130,10 +132,11 @@ public async Task AttachFileAsync(string assistantId, Fil /// The ID of the file we're getting. /// Optional, . /// . + [Obsolete("Files removed from Assistants. Files now belong to ToolResources.")] public async Task RetrieveFileAsync(string assistantId, string fileId, CancellationToken cancellationToken = default) { using var response = await client.Client.GetAsync(GetUrl($"/{assistantId}/files/{fileId}"), cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken: cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); return response.Deserialize(responseAsString, client); } @@ -149,13 +152,14 @@ public async Task RetrieveFileAsync(string assistantId, s /// The ID of the file to delete. /// Optional, . /// True, if file was removed. + [Obsolete("Files removed from Assistants. Files now belong to ToolResources.")] public async Task RemoveFileAsync(string assistantId, string fileId, CancellationToken cancellationToken = default) { using var response = await client.Client.DeleteAsync(GetUrl($"/{assistantId}/files/{fileId}"), cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken: cancellationToken).ConfigureAwait(false); - return JsonSerializer.Deserialize(responseAsString, OpenAIClient.JsonSerializationOptions)?.Deleted ?? false; + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client)?.Deleted ?? 
false; } - #endregion Files + #endregion Files (Obsolete) } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Assistants/CreateAssistantRequest.cs b/OpenAI-DotNet/Assistants/CreateAssistantRequest.cs index da20605c..48431e47 100644 --- a/OpenAI-DotNet/Assistants/CreateAssistantRequest.cs +++ b/OpenAI-DotNet/Assistants/CreateAssistantRequest.cs @@ -1,5 +1,7 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. +using OpenAI.Extensions; +using System; using System.Collections.Generic; using System.Linq; using System.Text.Json.Serialization; @@ -34,25 +36,72 @@ public sealed class CreateAssistantRequest /// There can be a maximum of 128 tools per assistant. /// Tools can be of types 'code_interpreter', 'retrieval', or 'function'. /// - /// - /// A list of file IDs attached to this assistant. - /// There can be a maximum of 20 files attached to the assistant. - /// Files are ordered by their creation date in ascending order. + /// + /// A set of resources that are used by Assistants and Threads. The resources are specific to the type of tool. + /// For example, the requres a list of file ids, + /// While the requires a list vector store ids. /// /// /// Set of 16 key-value pairs that can be attached to an object. /// This can be useful for storing additional information about the object in a structured format. /// Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. /// - public CreateAssistantRequest(AssistantResponse assistant, string model = null, string name = null, string description = null, string instructions = null, IEnumerable tools = null, IEnumerable files = null, IReadOnlyDictionary metadata = null) - : this( - string.IsNullOrWhiteSpace(model) ? assistant.Model : model, - string.IsNullOrWhiteSpace(name) ? assistant.Name : name, - string.IsNullOrWhiteSpace(description) ? assistant.Description : description, - string.IsNullOrWhiteSpace(instructions) ? 
assistant.Instructions : instructions, - tools ?? assistant.Tools, - files ?? assistant.FileIds, - metadata ?? assistant.Metadata) + /// + /// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, + /// while lower values like 0.2 will make it more focused and deterministic. + /// + /// + /// An alternative to sampling with temperature, called nucleus sampling, + /// where the model considers the results of the tokens with top_p probability mass. + /// So 0.1 means only the tokens comprising the top 10% probability mass are considered. + /// We generally recommend altering this or temperature but not both. + /// + /// + /// Specifies the format that the model must output. + /// Setting to enables JSON mode, + /// which guarantees the message the model generates is valid JSON.
+ /// Important: When using JSON mode you must still instruct the model to produce JSON yourself via some conversation message, + /// for example via your system message. If you don't do this, the model may generate an unending stream of + /// whitespace until the generation reaches the token limit, which may take a lot of time and give the appearance + /// of a "stuck" request. Also note that the message content may be partial (i.e. cut off) if finish_reason="length", + /// which indicates the generation exceeded max_tokens or the conversation exceeded the max context length. + /// + public CreateAssistantRequest( + AssistantResponse assistant, + string model = null, + string name = null, + string description = null, + string instructions = null, + IEnumerable tools = null, + ToolResources toolResources = null, + IReadOnlyDictionary metadata = null, + double? temperature = null, + double? topP = null, + ChatResponseFormat responseFormat = ChatResponseFormat.Auto) + : this( + string.IsNullOrWhiteSpace(model) ? assistant.Model : model, + string.IsNullOrWhiteSpace(name) ? assistant.Name : name, + string.IsNullOrWhiteSpace(description) ? assistant.Description : description, + string.IsNullOrWhiteSpace(instructions) ? assistant.Instructions : instructions, + tools ?? assistant.Tools, + toolResources ?? assistant.ToolResources, + metadata ?? assistant.Metadata, + temperature, + topP, + responseFormat) + { + } + + [Obsolete("use new .ctr")] + public CreateAssistantRequest( + AssistantResponse assistant, + string model = null, + string name = null, + string description = null, + string instructions = null, + IEnumerable tools = null, + IEnumerable files = null, + IReadOnlyDictionary metadata = null) { } @@ -74,32 +123,65 @@ public CreateAssistantRequest(AssistantResponse assistant, string model = null, /// /// /// The system instructions that the assistant uses. - /// The maximum length is 32768 characters. + /// The maximum length is 256,000 characters. 
/// /// /// A list of tool enabled on the assistant. /// There can be a maximum of 128 tools per assistant. /// Tools can be of types 'code_interpreter', 'retrieval', or 'function'. /// - /// - /// A list of file IDs attached to this assistant. - /// There can be a maximum of 20 files attached to the assistant. - /// Files are ordered by their creation date in ascending order. + /// + /// A set of resources that are used by Assistants and Threads. The resources are specific to the type of tool. + /// For example, the requres a list of file ids, + /// While the requires a list vector store ids. /// /// /// Set of 16 key-value pairs that can be attached to an object. /// This can be useful for storing additional information about the object in a structured format. /// Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. /// - public CreateAssistantRequest(string model = null, string name = null, string description = null, string instructions = null, IEnumerable tools = null, IEnumerable files = null, IReadOnlyDictionary metadata = null) + /// + /// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, + /// while lower values like 0.2 will make it more focused and deterministic. + /// + /// + /// An alternative to sampling with temperature, called nucleus sampling, + /// where the model considers the results of the tokens with top_p probability mass. + /// So 0.1 means only the tokens comprising the top 10% probability mass are considered. + /// We generally recommend altering this or temperature but not both. + /// + /// + /// Specifies the format that the model must output. + /// Setting to enables JSON mode, + /// which guarantees the message the model generates is valid JSON.
+ /// Important: When using JSON mode you must still instruct the model to produce JSON yourself via some conversation message, + /// for example via your system message. If you don't do this, the model may generate an unending stream of + /// whitespace until the generation reaches the token limit, which may take a lot of time and give the appearance + /// of a "stuck" request. Also note that the message content may be partial (i.e. cut off) if finish_reason="length", + /// which indicates the generation exceeded max_tokens or the conversation exceeded the max context length. + /// + public CreateAssistantRequest( + string model = null, + string name = null, + string description = null, + string instructions = null, + IEnumerable tools = null, + ToolResources toolResources = null, + IReadOnlyDictionary metadata = null, + double? temperature = null, + double? topP = null, + ChatResponseFormat responseFormat = ChatResponseFormat.Auto) { - Model = string.IsNullOrWhiteSpace(model) ? Models.Model.GPT3_5_Turbo : model; + Model = string.IsNullOrWhiteSpace(model) ? Models.Model.GPT4o : model; Name = name; Description = description; Instructions = instructions; Tools = tools?.ToList(); - FileIds = files?.ToList(); + ToolResources = toolResources; Metadata = metadata; + Temperature = temperature; + TopP = topP; + ResponseFormat = responseFormat; } /// @@ -126,7 +208,7 @@ public CreateAssistantRequest(string model = null, string name = null, string de /// /// The system instructions that the assistant uses. - /// The maximum length is 32768 characters. + /// The maximum length is 256,000 characters. /// [JsonPropertyName("instructions")] public string Instructions { get; } @@ -140,12 +222,45 @@ public CreateAssistantRequest(string model = null, string name = null, string de public IReadOnlyList Tools { get; } /// - /// A list of file IDs attached to this assistant. - /// There can be a maximum of 20 files attached to the assistant. 
- /// Files are ordered by their creation date in ascending order. + /// A set of resources that are used by Assistants and Threads. The resources are specific to the type of tool. + /// For example, the requres a list of file ids, + /// While the requires a list vector store ids. + /// + [JsonPropertyName("tool_resources")] + public ToolResources ToolResources { get; } + + /// + /// What sampling temperature to use, between 0 and 2. + /// Higher values like 0.8 will make the output more random, + /// while lower values like 0.2 will make it more focused and deterministic. + /// + [JsonPropertyName("temperature")] + public double? Temperature { get; } + + /// + /// An alternative to sampling with temperature, called nucleus sampling, + /// where the model considers the results of the tokens with top_p probability mass. + /// So 0.1 means only the tokens comprising the top 10% probability mass are considered. + /// + [JsonPropertyName("top_p")] + public double? TopP { get; } + + /// + /// Specifies the format that the model must output. + /// Setting to enables JSON mode, + /// which guarantees the message the model generates is valid JSON. /// - [JsonPropertyName("file_ids")] - public IReadOnlyList FileIds { get; } + /// + /// Important: When using JSON mode you must still instruct the model to produce JSON yourself via some conversation message, + /// for example via your system message. If you don't do this, the model may generate an unending stream of + /// whitespace until the generation reaches the token limit, which may take a lot of time and give the appearance + /// of a "stuck" request. Also note that the message content may be partial (i.e. cut off) if finish_reason="length", + /// which indicates the generation exceeded max_tokens or the conversation exceeded the max context length. 
+ /// + [JsonPropertyName("response_format")] + [JsonConverter(typeof(ResponseFormatConverter))] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ChatResponseFormat ResponseFormat { get; } /// /// Set of 16 key-value pairs that can be attached to an object. @@ -155,4 +270,4 @@ public CreateAssistantRequest(string model = null, string name = null, string de [JsonPropertyName("metadata")] public IReadOnlyDictionary Metadata { get; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Audio/AudioEndpoint.cs b/OpenAI-DotNet/Audio/AudioEndpoint.cs index 11688a0f..39798c19 100644 --- a/OpenAI-DotNet/Audio/AudioEndpoint.cs +++ b/OpenAI-DotNet/Audio/AudioEndpoint.cs @@ -29,12 +29,12 @@ public AudioEndpoint(OpenAIClient client) : base(client) { } /// . /// Optional, partial chunk callback to stream audio as it arrives. /// Optional, . - /// + /// . public async Task> CreateSpeechAsync(SpeechRequest request, Func, Task> chunkCallback = null, CancellationToken cancellationToken = default) { - using var jsonContent = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); - using var response = await client.Client.PostAsync(GetUrl("/speech"), jsonContent, cancellationToken).ConfigureAwait(false); - await response.CheckResponseAsync(false, jsonContent, null, cancellationToken).ConfigureAwait(false); + using var payload = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await client.Client.PostAsync(GetUrl("/speech"), payload, cancellationToken).ConfigureAwait(false); + await response.CheckResponseAsync(false, payload, cancellationToken: cancellationToken).ConfigureAwait(false); await using var responseStream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); await using var memoryStream = new MemoryStream(); int bytesRead; @@ -60,14 +60,10 @@ public async Task> CreateSpeechAsync(SpeechRequest 
request, totalBytesRead += bytesRead; } - await response.CheckResponseAsync(EnableDebug, jsonContent, null, cancellationToken).ConfigureAwait(false); + await response.CheckResponseAsync(EnableDebug, payload, cancellationToken: cancellationToken).ConfigureAwait(false); return new ReadOnlyMemory(memoryStream.GetBuffer(), 0, totalBytesRead); } - [Obsolete("Use CreateTranscriptionTextAsync or CreateTranscriptionJsonAsync instead.")] - public async Task CreateTranscriptionAsync(AudioTranscriptionRequest request, CancellationToken cancellationToken = default) - => await CreateTranscriptionTextAsync(request, cancellationToken).ConfigureAwait(false); - /// /// Transcribes audio into the input language. /// @@ -76,9 +72,9 @@ public async Task CreateTranscriptionAsync(AudioTranscriptionRequest req /// The transcribed text. public async Task CreateTranscriptionTextAsync(AudioTranscriptionRequest request, CancellationToken cancellationToken = default) { - var responseAsString = await Internal_CreateTranscriptionAsync(request, cancellationToken).ConfigureAwait(false); + var (response, responseAsString) = await Internal_CreateTranscriptionAsync(request, cancellationToken).ConfigureAwait(false); return request.ResponseFormat is AudioResponseFormat.Json or AudioResponseFormat.Verbose_Json - ? JsonSerializer.Deserialize(responseAsString)?.Text + ? 
response.Deserialize(responseAsString, client)?.Text : responseAsString; } @@ -96,11 +92,11 @@ public async Task CreateTranscriptionJsonAsync(AudioTranscription throw new ArgumentException("Response format must be Json or Verbose Json.", nameof(request.ResponseFormat)); } - var responseAsString = await Internal_CreateTranscriptionAsync(request, cancellationToken).ConfigureAwait(false); - return JsonSerializer.Deserialize(responseAsString); + var (response, responseAsString) = await Internal_CreateTranscriptionAsync(request, cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client); } - private async Task Internal_CreateTranscriptionAsync(AudioTranscriptionRequest request, CancellationToken cancellationToken = default) + private async Task<(HttpResponseMessage, string)> Internal_CreateTranscriptionAsync(AudioTranscriptionRequest request, CancellationToken cancellationToken = default) { using var content = new MultipartFormDataContent(); using var audioData = new MemoryStream(); @@ -136,14 +132,10 @@ private async Task Internal_CreateTranscriptionAsync(AudioTranscriptionR request.Dispose(); using var response = await client.Client.PostAsync(GetUrl("/transcriptions"), content, cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, content, null, cancellationToken).ConfigureAwait(false); - return responseAsString; + var responseAsString = await response.ReadAsStringAsync(EnableDebug, content, cancellationToken).ConfigureAwait(false); + return (response, responseAsString); } - [Obsolete("Use CreateTranslationTextAsync or CreateTranslationJsonAsync instead.")] - public async Task CreateTranslationAsync(AudioTranslationRequest request, CancellationToken cancellationToken = default) - => await CreateTranslationTextAsync(request, cancellationToken).ConfigureAwait(false); - /// /// Translates audio into English. 
/// @@ -152,9 +144,9 @@ public async Task CreateTranslationAsync(AudioTranslationRequest request /// The translated text. public async Task CreateTranslationTextAsync(AudioTranslationRequest request, CancellationToken cancellationToken = default) { - var responseAsString = await Internal_CreateTranslationAsync(request, cancellationToken).ConfigureAwait(false); + var (response, responseAsString) = await Internal_CreateTranslationAsync(request, cancellationToken).ConfigureAwait(false); return request.ResponseFormat is AudioResponseFormat.Json or AudioResponseFormat.Verbose_Json - ? JsonSerializer.Deserialize(responseAsString)?.Text + ? response.Deserialize(responseAsString, client)?.Text : responseAsString; } @@ -172,11 +164,11 @@ public async Task CreateTranslationJsonAsync(AudioTranslationRequ throw new ArgumentException("Response format must be Json or Verbose Json.", nameof(request.ResponseFormat)); } - var responseAsString = await Internal_CreateTranslationAsync(request, cancellationToken).ConfigureAwait(false); - return JsonSerializer.Deserialize(responseAsString); + var (response, responseAsString) = await Internal_CreateTranslationAsync(request, cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client); } - private async Task Internal_CreateTranslationAsync(AudioTranslationRequest request, CancellationToken cancellationToken = default) + private async Task<(HttpResponseMessage, string)> Internal_CreateTranslationAsync(AudioTranslationRequest request, CancellationToken cancellationToken = default) { using var content = new MultipartFormDataContent(); using var audioData = new MemoryStream(); @@ -199,8 +191,8 @@ private async Task Internal_CreateTranslationAsync(AudioTranslationReque request.Dispose(); using var response = await client.Client.PostAsync(GetUrl("/translations"), content, cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, content, null, 
cancellationToken).ConfigureAwait(false); - return responseAsString; + var responseAsString = await response.ReadAsStringAsync(EnableDebug, content, cancellationToken).ConfigureAwait(false); + return (response, responseAsString); } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Audio/AudioResponse.cs b/OpenAI-DotNet/Audio/AudioResponse.cs index 4b43c86a..8ad6fe05 100644 --- a/OpenAI-DotNet/Audio/AudioResponse.cs +++ b/OpenAI-DotNet/Audio/AudioResponse.cs @@ -4,7 +4,7 @@ namespace OpenAI.Audio { - public sealed class AudioResponse + public sealed class AudioResponse : BaseResponse { /// /// The language of the input audio. @@ -44,4 +44,4 @@ public sealed class AudioResponse [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public TranscriptionSegment[] Segments { get; private set; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Audio/AudioResponseFormat.cs b/OpenAI-DotNet/Audio/AudioResponseFormat.cs index 12197260..baa305ca 100644 --- a/OpenAI-DotNet/Audio/AudioResponseFormat.cs +++ b/OpenAI-DotNet/Audio/AudioResponseFormat.cs @@ -10,4 +10,4 @@ public enum AudioResponseFormat Srt, Vtt } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Audio/AudioTranscriptionRequest.cs b/OpenAI-DotNet/Audio/AudioTranscriptionRequest.cs index 0ce6578a..94abc604 100644 --- a/OpenAI-DotNet/Audio/AudioTranscriptionRequest.cs +++ b/OpenAI-DotNet/Audio/AudioTranscriptionRequest.cs @@ -201,4 +201,4 @@ public void Dispose() GC.SuppressFinalize(this); } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Audio/AudioTranslationRequest.cs b/OpenAI-DotNet/Audio/AudioTranslationRequest.cs index 502ed5de..21812c0a 100644 --- a/OpenAI-DotNet/Audio/AudioTranslationRequest.cs +++ b/OpenAI-DotNet/Audio/AudioTranslationRequest.cs @@ -140,4 +140,4 @@ public void Dispose() GC.SuppressFinalize(this); } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Audio/SpeechRequest.cs b/OpenAI-DotNet/Audio/SpeechRequest.cs index 
12978d23..24ed5355 100644 --- a/OpenAI-DotNet/Audio/SpeechRequest.cs +++ b/OpenAI-DotNet/Audio/SpeechRequest.cs @@ -1,8 +1,9 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. -using System.Text.Json.Serialization; using OpenAI.Extensions; using OpenAI.Models; +using System; +using System.Text.Json.Serialization; namespace OpenAI.Audio { @@ -18,7 +19,7 @@ public sealed class SpeechRequest /// The speed of the generated audio. Select a value from 0.25 to 4.0. 1.0 is the default. public SpeechRequest(string input, Model model = null, SpeechVoice voice = SpeechVoice.Alloy, SpeechResponseFormat responseFormat = SpeechResponseFormat.MP3, float? speed = null) { - Input = input; + Input = !string.IsNullOrWhiteSpace(input) ? input : throw new ArgumentException("Input cannot be null or empty.", nameof(input)); Model = string.IsNullOrWhiteSpace(model?.Id) ? Models.Model.TTS_1 : model; Voice = voice; ResponseFormat = responseFormat; diff --git a/OpenAI-DotNet/Audio/TimestampGranularity.cs b/OpenAI-DotNet/Audio/TimestampGranularity.cs index dca4925e..a89887c2 100644 --- a/OpenAI-DotNet/Audio/TimestampGranularity.cs +++ b/OpenAI-DotNet/Audio/TimestampGranularity.cs @@ -8,4 +8,4 @@ public enum TimestampGranularity Word, Segment } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Audio/TranscriptionSegment.cs b/OpenAI-DotNet/Audio/TranscriptionSegment.cs index c29cea43..ae9a39e1 100644 --- a/OpenAI-DotNet/Audio/TranscriptionSegment.cs +++ b/OpenAI-DotNet/Audio/TranscriptionSegment.cs @@ -82,4 +82,4 @@ public sealed class TranscriptionSegment [JsonPropertyName("no_speech_prob")] public double NoSpeechProbability { get; private set; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Audio/TranscriptionWord.cs b/OpenAI-DotNet/Audio/TranscriptionWord.cs index 1ede1282..cf9a505b 100644 --- a/OpenAI-DotNet/Audio/TranscriptionWord.cs +++ b/OpenAI-DotNet/Audio/TranscriptionWord.cs @@ -30,4 +30,4 @@ public sealed class 
TranscriptionWord [JsonPropertyName("end")] public double End { get; private set; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Authentication/AuthInfo.cs b/OpenAI-DotNet/Authentication/AuthInfo.cs index 94827178..fd371e04 100644 --- a/OpenAI-DotNet/Authentication/AuthInfo.cs +++ b/OpenAI-DotNet/Authentication/AuthInfo.cs @@ -8,11 +8,12 @@ namespace OpenAI internal class AuthInfo { internal const string SecretKeyPrefix = "sk-"; + internal const string ProjectPrefix = "proj_"; internal const string SessionKeyPrefix = "sess-"; internal const string OrganizationPrefix = "org-"; [JsonConstructor] - public AuthInfo(string apiKey, string organizationId = null) + public AuthInfo(string apiKey, string organizationId = null, string projectId = null) { ApiKey = apiKey; @@ -25,12 +26,25 @@ public AuthInfo(string apiKey, string organizationId = null) OrganizationId = organizationId; } + + if (!string.IsNullOrWhiteSpace(projectId)) + { + if (!projectId.Contains(ProjectPrefix)) + { + throw new InvalidCredentialException($"{nameof(projectId)} must start with '{ProjectPrefix}'"); + } + + ProjectId = projectId; + } } [JsonPropertyName("apiKey")] public string ApiKey { get; } - [JsonPropertyName("organization")] + [JsonPropertyName("organizationId")] public string OrganizationId { get; } + + [JsonPropertyName("projectId")] + public string ProjectId { get; } } } diff --git a/OpenAI-DotNet/Authentication/OpenAIAuthentication.cs b/OpenAI-DotNet/Authentication/OpenAIAuthentication.cs index ff245f24..bb701aa9 100644 --- a/OpenAI-DotNet/Authentication/OpenAIAuthentication.cs +++ b/OpenAI-DotNet/Authentication/OpenAIAuthentication.cs @@ -11,13 +11,15 @@ namespace OpenAI /// public sealed class OpenAIAuthentication { - private const string OPENAI_KEY = "OPENAI_KEY"; - private const string OPENAI_API_KEY = "OPENAI_API_KEY"; - private const string OPENAI_SECRET_KEY = "OPENAI_SECRET_KEY"; - private const string TEST_OPENAI_SECRET_KEY = "TEST_OPENAI_SECRET_KEY"; - private 
const string OPENAI_ORGANIZATION_ID = "OPENAI_ORGANIZATION_ID"; - private const string OPEN_AI_ORGANIZATION_ID = "OPEN_AI_ORGANIZATION_ID"; - private const string ORGANIZATION = "ORGANIZATION"; + internal const string CONFIG_FILE = ".openai"; + private const string OPENAI_KEY = nameof(OPENAI_KEY); + private const string OPENAI_API_KEY = nameof(OPENAI_API_KEY); + private const string OPENAI_SECRET_KEY = nameof(OPENAI_SECRET_KEY); + private const string OPENAI_PROJECT_ID = nameof(OPENAI_PROJECT_ID); + private const string OPEN_AI_PROJECT_ID = nameof(OPEN_AI_PROJECT_ID); + private const string TEST_OPENAI_SECRET_KEY = nameof(TEST_OPENAI_SECRET_KEY); + private const string OPENAI_ORGANIZATION_ID = nameof(OPENAI_ORGANIZATION_ID); + private const string OPEN_AI_ORGANIZATION_ID = nameof(OPEN_AI_ORGANIZATION_ID); private readonly AuthInfo authInfo; @@ -32,6 +34,11 @@ public sealed class OpenAIAuthentication /// public string OrganizationId => authInfo.OrganizationId; + /// + /// For users that specify specific projects. + /// + public string ProjectId => authInfo.ProjectId; + /// /// Allows implicit casting from a string, so that a simple string API key can be provided in place of an instance of . /// @@ -54,7 +61,10 @@ public sealed class OpenAIAuthentication /// For users who belong to multiple organizations, you can pass a header to specify which organization is used for an API request. /// Usage from these API requests will count against the specified organization's subscription quota. /// - public OpenAIAuthentication(string apiKey, string organization) => authInfo = new AuthInfo(apiKey, organization); + /// + /// Optional, Project id to specify. 
+ /// + public OpenAIAuthentication(string apiKey, string organization, string projectId = null) => authInfo = new AuthInfo(apiKey, organization, projectId); private static OpenAIAuthentication cachedDefault; @@ -121,12 +131,14 @@ public static OpenAIAuthentication LoadFromEnv(string organizationId = null) organizationId = Environment.GetEnvironmentVariable(OPENAI_ORGANIZATION_ID); } - if (string.IsNullOrWhiteSpace(organizationId)) + var projectId = Environment.GetEnvironmentVariable(OPEN_AI_PROJECT_ID); + + if (string.IsNullOrWhiteSpace(projectId)) { - organizationId = Environment.GetEnvironmentVariable(ORGANIZATION); + projectId = Environment.GetEnvironmentVariable(OPENAI_PROJECT_ID); } - return string.IsNullOrEmpty(apiKey) ? null : new OpenAIAuthentication(apiKey, organizationId); + return string.IsNullOrEmpty(apiKey) ? null : new OpenAIAuthentication(apiKey, organizationId, projectId); } /// @@ -159,7 +171,7 @@ public static OpenAIAuthentication LoadFromPath(string path) /// or if it was not successful in finding a config /// (or if the config file didn't contain correctly formatted API keys) /// - public static OpenAIAuthentication LoadFromDirectory(string directory = null, string filename = ".openai", bool searchUp = true) + public static OpenAIAuthentication LoadFromDirectory(string directory = null, string filename = CONFIG_FILE, bool searchUp = true) { if (string.IsNullOrWhiteSpace(directory)) { @@ -188,6 +200,7 @@ public static OpenAIAuthentication LoadFromDirectory(string directory = null, st var lines = File.ReadAllLines(filePath); string apiKey = null; + string projectId = null; string organization = null; foreach (var line in lines) @@ -207,16 +220,19 @@ public static OpenAIAuthentication LoadFromDirectory(string directory = null, st case TEST_OPENAI_SECRET_KEY: apiKey = nextPart.Trim(); break; - case ORGANIZATION: case OPEN_AI_ORGANIZATION_ID: case OPENAI_ORGANIZATION_ID: organization = nextPart.Trim(); break; + case OPENAI_PROJECT_ID: + case 
OPEN_AI_PROJECT_ID: + projectId = nextPart.Trim(); + break; } } } - authInfo = new AuthInfo(apiKey, organization); + authInfo = new AuthInfo(apiKey, organization, projectId); } if (searchUp) diff --git a/OpenAI-DotNet/Batch/BatchEndpoint.cs b/OpenAI-DotNet/Batch/BatchEndpoint.cs new file mode 100644 index 00000000..3dcc9e0a --- /dev/null +++ b/OpenAI-DotNet/Batch/BatchEndpoint.cs @@ -0,0 +1,89 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using OpenAI.Extensions; +using System; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; + +namespace OpenAI.Batch +{ + /// + /// Create large batches of API requests for asynchronous processing. + /// The Batch API returns completions within 24 hours for a 50% discount. + /// + /// + public sealed class BatchEndpoint : OpenAIBaseEndpoint + { + public BatchEndpoint(OpenAIClient client) : base(client) { } + + protected override string Root => "batches"; + + /// + /// Creates and executes a batch from an uploaded file of requests. + /// + /// . + /// Optional . + /// . + public async Task CreateBatchAsync(CreateBatchRequest request, CancellationToken cancellationToken = default) + { + using var payload = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await client.Client.PostAsync(GetUrl(), payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client); + } + + /// + /// List your organization's batches. + /// + /// . + /// Optional, . + /// . 
+ public async Task> ListBatchesAsync(ListQuery query = null, CancellationToken cancellationToken = default) + { + using var response = await client.Client.GetAsync(GetUrl(queryParameters: query), cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); + return response.Deserialize>(responseAsString, client); + } + + /// + /// Retrieves a batch. + /// + /// The ID of the batch to retrieve. + /// Optional . + /// . + public async Task RetrieveBatchAsync(string batchId, CancellationToken cancellationToken = default) + { + using var response = await client.Client.GetAsync(GetUrl($"/{batchId}"), cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client); + } + + /// + /// Cancels an in-progress batch. + /// + /// + /// Optional . + /// True, if the batch was cancelled, otherwise false. + public async Task CancelBatchAsync(string batchId, CancellationToken cancellationToken = default) + { + using var response = await client.Client.PostAsync(GetUrl($"/{batchId}/cancel"), null!, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); + var batch = response.Deserialize(responseAsString, client); + + if (batch.Status < BatchStatus.Cancelling) + { + try + { + batch = await batch.WaitForStatusChangeAsync(cancellationToken: cancellationToken); + } + catch (Exception) + { + // ignored + } + } + + return batch.Status >= BatchStatus.Cancelling; + } + } +} diff --git a/OpenAI-DotNet/Batch/BatchErrors.cs b/OpenAI-DotNet/Batch/BatchErrors.cs new file mode 100644 index 00000000..aa700c5f --- /dev/null +++ b/OpenAI-DotNet/Batch/BatchErrors.cs @@ -0,0 +1,14 @@ +// Licensed under the MIT License. 
See LICENSE in the project root for license information. + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace OpenAI.Batch +{ + public sealed class BatchErrors + { + [JsonInclude] + [JsonPropertyName("data")] + public IReadOnlyList Errors { get; private set; } + } +} diff --git a/OpenAI-DotNet/Batch/BatchExtensions.cs b/OpenAI-DotNet/Batch/BatchExtensions.cs new file mode 100644 index 00000000..2c6a60ea --- /dev/null +++ b/OpenAI-DotNet/Batch/BatchExtensions.cs @@ -0,0 +1,44 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace OpenAI.Batch +{ + public static class BatchExtensions + { + /// + /// Get the latest status of the . + /// + /// . + /// Optional, . + /// . + public static async Task UpdateAsync(this BatchResponse batchResponse, CancellationToken cancellationToken = default) + => await batchResponse.Client.BatchEndpoint.RetrieveBatchAsync(batchResponse.Id, cancellationToken).ConfigureAwait(false); + + /// + /// Waits for to change. + /// + /// . + /// Optional, time in milliseconds to wait before polling status. + /// Optional, timeout in seconds to cancel polling.
Defaults to 30 seconds.
Set to -1 for indefinite. + /// Optional, . + /// . + public static async Task WaitForStatusChangeAsync(this BatchResponse batchResponse, int? pollingInterval = null, int? timeout = null, CancellationToken cancellationToken = default) + { + using CancellationTokenSource cts = timeout is < 0 + ? new CancellationTokenSource() + : new CancellationTokenSource(TimeSpan.FromSeconds(timeout ?? 30)); + using var chainedCts = CancellationTokenSource.CreateLinkedTokenSource(cts.Token, cancellationToken); + BatchResponse result; + do + { + await Task.Delay(pollingInterval ?? 500, chainedCts.Token).ConfigureAwait(false); + cancellationToken.ThrowIfCancellationRequested(); + result = await batchResponse.UpdateAsync(cancellationToken: chainedCts.Token).ConfigureAwait(false); + } while (result.Status is BatchStatus.NotStarted or BatchStatus.InProgress or BatchStatus.Cancelling); + return result; + } + } +} diff --git a/OpenAI-DotNet/Batch/BatchResponse.cs b/OpenAI-DotNet/Batch/BatchResponse.cs new file mode 100644 index 00000000..7abf8f07 --- /dev/null +++ b/OpenAI-DotNet/Batch/BatchResponse.cs @@ -0,0 +1,194 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using OpenAI.Extensions; +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace OpenAI.Batch +{ + public sealed class BatchResponse : BaseResponse + { + [JsonInclude] + [JsonPropertyName("id")] + public string Id { get; private set; } + + /// + /// The object type, which is always batch. + /// + [JsonInclude] + [JsonPropertyName("object")] + public string Object { get; private set; } + + /// + /// The OpenAI API endpoint used by the batch. + /// + [JsonInclude] + [JsonPropertyName("endpoint")] + public string Endpoint { get; private set; } + + /// + /// Errors that occurred during the batch job. 
+ /// + [JsonInclude] + [JsonPropertyName("errors")] + public BatchErrors BatchErrors { get; private set; } + + /// + /// The ID of the input file for the batch. + /// + [JsonInclude] + [JsonPropertyName("input_file_id")] + public string InputFileId { get; private set; } + + /// + /// The time frame within which the batch should be processed. + /// + [JsonInclude] + [JsonPropertyName("completion_window")] + public string CompletionWindow { get; private set; } + + /// + /// The current status of the batch. + /// + [JsonInclude] + [JsonPropertyName("status")] + [JsonConverter(typeof(JsonStringEnumConverter))] + public BatchStatus Status { get; private set; } + + /// + /// The ID of the file containing the outputs of successfully executed requests. + /// + [JsonInclude] + [JsonPropertyName("output_file_id")] + public string OutputFileId { get; private set; } + + /// + /// The ID of the file containing the outputs of requests with errors. + /// + [JsonInclude] + [JsonPropertyName("error_file_id")] + public string ErrorFileId { get; private set; } + + /// + /// The Unix timestamp (in seconds) for when the batch was created. + /// + [JsonInclude] + [JsonPropertyName("created_at")] + public int CreatedAtUnixTimeSeconds { get; private set; } + + [JsonIgnore] + public DateTime CreatedAt => DateTimeOffset.FromUnixTimeSeconds(CreatedAtUnixTimeSeconds).DateTime; + + /// + /// The Unix timestamp (in seconds) for when the batch started processing. + /// + [JsonInclude] + [JsonPropertyName("in_progress_at")] + public int? InProgressAtUnixTimeSeconds { get; private set; } + + [JsonIgnore] + public DateTime? InProgressAt + => InProgressAtUnixTimeSeconds.HasValue + ? DateTimeOffset.FromUnixTimeSeconds(InProgressAtUnixTimeSeconds.Value).DateTime + : null; + + /// + /// The Unix timestamp (in seconds) for when the batch will expire. + /// + [JsonInclude] + [JsonPropertyName("expires_at")] + public int? ExpiresAtUnixTimeSeconds { get; private set; } + + [JsonIgnore] + public DateTime? 
ExpiresAt + => ExpiresAtUnixTimeSeconds.HasValue + ? DateTimeOffset.FromUnixTimeSeconds(ExpiresAtUnixTimeSeconds.Value).DateTime + : null; + + /// + /// The Unix timestamp (in seconds) for when the batch started finalizing. + /// + [JsonInclude] + [JsonPropertyName("finalizing_at")] + public int? FinalizingAtUnixTimeSeconds { get; private set; } + + [JsonIgnore] + public DateTime? FinalizingAt + => FinalizingAtUnixTimeSeconds.HasValue + ? DateTimeOffset.FromUnixTimeSeconds(FinalizingAtUnixTimeSeconds.Value).DateTime + : null; + + /// + /// The Unix timestamp (in seconds) for when the batch was completed. + /// + [JsonInclude] + [JsonPropertyName("completed_at")] + public int? CompletedAtUnixTimeSeconds { get; private set; } + + [JsonIgnore] + public DateTime? CompletedAt + => CompletedAtUnixTimeSeconds.HasValue + ? DateTimeOffset.FromUnixTimeSeconds(CompletedAtUnixTimeSeconds.Value).DateTime + : null; + + /// + /// The Unix timestamp (in seconds) for when the batch failed. + /// + [JsonInclude] + [JsonPropertyName("failed_at")] + public int? FailedAtUnixTimeSeconds { get; private set; } + + [JsonIgnore] + public DateTime? FailedAt + => FailedAtUnixTimeSeconds.HasValue + ? DateTimeOffset.FromUnixTimeSeconds(FailedAtUnixTimeSeconds.Value).DateTime + : null; + + /// + /// The Unix timestamp (in seconds) for when the batch expired. + /// + [JsonInclude] + [JsonPropertyName("expired_at")] + public int? ExpiredAtUnixTimeSeconds { get; private set; } + + [JsonIgnore] + public DateTime? ExpiredAt + => ExpiredAtUnixTimeSeconds.HasValue + ? DateTimeOffset.FromUnixTimeSeconds(ExpiredAtUnixTimeSeconds.Value).DateTime + : null; + + /// + /// The Unix timestamp (in seconds) for when the batch was cancelled. + /// + [JsonInclude] + [JsonPropertyName("cancelled_at")] + public int? CancelledAtUnixTimeSeconds { get; private set; } + + [JsonIgnore] + public DateTime? CancelledAt + => CancelledAtUnixTimeSeconds.HasValue + ? 
DateTimeOffset.FromUnixTimeSeconds(CancelledAtUnixTimeSeconds.Value).DateTime + : null; + + /// + /// The request counts for different statuses within the batch. + /// + [JsonInclude] + [JsonPropertyName("request_counts")] + public RequestCounts RequestCounts { get; private set; } + + /// + /// Set of 16 key-value pairs that can be attached to an object. + /// This can be useful for storing additional information about the object in a structured format. + /// Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. + /// + [JsonInclude] + [JsonPropertyName("metadata")] + public IReadOnlyDictionary Metadata { get; private set; } + + public override string ToString() => Id; + + public static implicit operator string(BatchResponse response) => response?.ToString(); + } +} diff --git a/OpenAI-DotNet/Batch/BatchStatus.cs b/OpenAI-DotNet/Batch/BatchStatus.cs new file mode 100644 index 00000000..2a42c477 --- /dev/null +++ b/OpenAI-DotNet/Batch/BatchStatus.cs @@ -0,0 +1,27 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System.Runtime.Serialization; + +namespace OpenAI.Batch +{ + public enum BatchStatus + { + NotStarted = 0, + [EnumMember(Value = "validating")] + Validating, + [EnumMember(Value = "in_progress")] + InProgress, + [EnumMember(Value = "finalizing")] + Finalizing, + [EnumMember(Value = "cancelling")] + Cancelling, + [EnumMember(Value = "cancelled")] + Cancelled, + [EnumMember(Value = "completed")] + Completed, + [EnumMember(Value = "expired")] + Expired, + [EnumMember(Value = "failed")] + Failed, + } +} diff --git a/OpenAI-DotNet/Batch/CreateBatchRequest.cs b/OpenAI-DotNet/Batch/CreateBatchRequest.cs new file mode 100644 index 00000000..de9d9871 --- /dev/null +++ b/OpenAI-DotNet/Batch/CreateBatchRequest.cs @@ -0,0 +1,48 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. 
+ +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace OpenAI.Batch +{ + public sealed class CreateBatchRequest + { + private const string DefaultCompletionWindow = "24h"; + + /// + /// + /// + /// + /// The ID of an uploaded file that contains requests for the new batch. + /// Your input file must be formatted as a JSONL file, and must be uploaded with the purpose batch. + /// The file can contain up to 50,000 requests, and can be up to 100 MB in size. + /// + /// + /// The endpoint to be used for all requests in the batch. + /// Currently, '/v1/chat/completions', '/v1/embeddings', and '/v1/completions' are supported. + /// Note that '/v1/embeddings' batches are also restricted to a maximum of 50,000 embedding inputs across all requests in the batch. + /// + /// + /// Optional custom metadata for the batch. + /// + public CreateBatchRequest(string inputFileId, string endpoint, IReadOnlyDictionary metadata = null) + { + InputFileId = inputFileId; + Endpoint = endpoint; + CompletionWindow = DefaultCompletionWindow; + Metadata = metadata; + } + + [JsonPropertyName("input_file_id")] + public string InputFileId { get; } + + [JsonPropertyName("endpoint")] + public string Endpoint { get; } + + [JsonPropertyName("completion_window")] + public string CompletionWindow { get; } + + [JsonPropertyName("metadata")] + public IReadOnlyDictionary Metadata { get; } + } +} diff --git a/OpenAI-DotNet/Batch/Endpoint.cs b/OpenAI-DotNet/Batch/Endpoint.cs new file mode 100644 index 00000000..e98bfaaf --- /dev/null +++ b/OpenAI-DotNet/Batch/Endpoint.cs @@ -0,0 +1,21 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. 
+ +namespace OpenAI.Batch +{ + public sealed class Endpoint + { + public const string ChatCompletions = "/v1/chat/completions"; + public const string Embeddings = "/v1/embeddings"; + public const string Completions = "/v1/completions"; + + public Endpoint(string endpoint) => Value = endpoint; + + public string Value { get; } + + public override string ToString() => Value; + + public static implicit operator string(Endpoint endpoint) => endpoint?.ToString(); + + public static implicit operator Endpoint(string endpoint) => new(endpoint); + } +} diff --git a/OpenAI-DotNet/Batch/RequestCounts.cs b/OpenAI-DotNet/Batch/RequestCounts.cs new file mode 100644 index 00000000..13ea7ca5 --- /dev/null +++ b/OpenAI-DotNet/Batch/RequestCounts.cs @@ -0,0 +1,30 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System.Text.Json.Serialization; + +namespace OpenAI.Batch +{ + public sealed class RequestCounts + { + /// + /// Total number of requests in the batch. + /// + [JsonInclude] + [JsonPropertyName("total")] + public int Total { get; private set; } + + /// + /// Number of requests that have been completed successfully. + /// + [JsonInclude] + [JsonPropertyName("completed")] + public int Completed { get; private set; } + + /// + /// Number of requests that have failed. + /// + [JsonInclude] + [JsonPropertyName("failed")] + public int Failed { get; private set; } + } +} diff --git a/OpenAI-DotNet/Chat/ChatEndpoint.cs b/OpenAI-DotNet/Chat/ChatEndpoint.cs index 8a6d2d5f..d8937b1c 100644 --- a/OpenAI-DotNet/Chat/ChatEndpoint.cs +++ b/OpenAI-DotNet/Chat/ChatEndpoint.cs @@ -34,9 +34,9 @@ public ChatEndpoint(OpenAIClient client) : base(client) { } /// . 
public async Task GetCompletionAsync(ChatRequest chatRequest, CancellationToken cancellationToken = default) { - using var jsonContent = JsonSerializer.Serialize(chatRequest, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); - using var response = await client.Client.PostAsync(GetUrl("/completions"), jsonContent, cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, jsonContent, null, cancellationToken).ConfigureAwait(false); + using var payload = JsonSerializer.Serialize(chatRequest, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await client.Client.PostAsync(GetUrl("/completions"), payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); return response.Deserialize(responseAsString, client); } @@ -45,56 +45,23 @@ public async Task GetCompletionAsync(ChatRequest chatRequest, Canc ///
/// The chat request which contains the message content. /// An action to be called as each new result arrives. + /// + /// Optional, If set, an additional chunk will be streamed before the 'data: [DONE]' message. + /// The 'usage' field on this chunk shows the token usage statistics for the entire request, + /// and the 'choices' field will always be an empty array. All other chunks will also include a 'usage' field, + /// but with a null value. + /// /// Optional, . /// . - public async Task StreamCompletionAsync(ChatRequest chatRequest, Action resultHandler, CancellationToken cancellationToken = default) + public async Task StreamCompletionAsync(ChatRequest chatRequest, Action resultHandler, bool streamUsage = false, CancellationToken cancellationToken = default) { chatRequest.Stream = true; - using var jsonContent = JsonSerializer.Serialize(chatRequest, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); - using var request = new HttpRequestMessage(HttpMethod.Post, GetUrl("/completions")); - request.Content = jsonContent; - using var response = await client.Client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); - await response.CheckResponseAsync(false, jsonContent, null, cancellationToken).ConfigureAwait(false); - await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); - using var reader = new StreamReader(stream); + chatRequest.StreamOptions = streamUsage ? new StreamOptions() : null; ChatResponse chatResponse = null; - using var responseStream = EnableDebug ? 
new MemoryStream() : null; - - if (responseStream != null) - { - await responseStream.WriteAsync("["u8.ToArray(), cancellationToken); - } - - while (await reader.ReadLineAsync().ConfigureAwait(false) is { } streamData) + using var payload = JsonSerializer.Serialize(chatRequest, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await this.StreamEventsAsync(GetUrl("/completions"), payload, (sseResponse, ssEvent) => { - cancellationToken.ThrowIfCancellationRequested(); - - if (!streamData.TryGetEventStreamData(out var eventData)) - { - // if response stream is not null, remove last comma - responseStream?.SetLength(responseStream.Length - 1); - continue; - } - - if (string.IsNullOrWhiteSpace(eventData)) { continue; } - - if (responseStream != null) - { - string data; - - try - { - data = JsonNode.Parse(eventData)?.ToJsonString(OpenAIClient.JsonSerializationOptions); - } - catch - { - data = $"{{{eventData}}}"; - } - - await responseStream.WriteAsync(Encoding.UTF8.GetBytes($"{data},"), cancellationToken); - } - - var partialResponse = response.Deserialize(eventData, client); + var partialResponse = sseResponse.Deserialize(ssEvent, client); if (chatResponse == null) { @@ -102,21 +69,14 @@ public async Task StreamCompletionAsync(ChatRequest chatRequest, A } else { - chatResponse.CopyFrom(partialResponse); + chatResponse.AppendFrom(partialResponse); } resultHandler?.Invoke(partialResponse); - } - if (responseStream != null) - { - await responseStream.WriteAsync("]"u8.ToArray(), cancellationToken); - } - - await response.CheckResponseAsync(EnableDebug, jsonContent, responseStream, cancellationToken).ConfigureAwait(false); + }, cancellationToken); if (chatResponse == null) { return null; } - chatResponse.SetResponseData(response.Headers, client); resultHandler?.Invoke(chatResponse); return chatResponse; @@ -125,19 +85,26 @@ public async Task StreamCompletionAsync(ChatRequest chatRequest, A /// /// Created a completion for the chat message 
and stream the results as they come in.
/// If you are not using C# 8 supporting IAsyncEnumerable{T} or if you are using the .NET Framework, - /// you may need to use instead. + /// you may need to use instead. ///
/// The chat request which contains the message content. + /// + /// Optional, If set, an additional chunk will be streamed before the 'data: [DONE]' message. + /// The 'usage' field on this chunk shows the token usage statistics for the entire request, + /// and the 'choices' field will always be an empty array. All other chunks will also include a 'usage' field, + /// but with a null value. + /// /// Optional, . /// . - public async IAsyncEnumerable StreamCompletionEnumerableAsync(ChatRequest chatRequest, [EnumeratorCancellation] CancellationToken cancellationToken = default) + public async IAsyncEnumerable StreamCompletionEnumerableAsync(ChatRequest chatRequest, bool streamUsage = false, [EnumeratorCancellation] CancellationToken cancellationToken = default) { chatRequest.Stream = true; - using var jsonContent = JsonSerializer.Serialize(chatRequest, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + chatRequest.StreamOptions = streamUsage ? new StreamOptions() : null; + using var payload = JsonSerializer.Serialize(chatRequest, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); using var request = new HttpRequestMessage(HttpMethod.Post, GetUrl("/completions")); - request.Content = jsonContent; + request.Content = payload; using var response = await client.Client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); - await response.CheckResponseAsync(false, jsonContent, null, cancellationToken).ConfigureAwait(false); + await response.CheckResponseAsync(false, payload, cancellationToken: cancellationToken).ConfigureAwait(false); await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); using var reader = new StreamReader(stream); ChatResponse chatResponse = null; @@ -185,7 +152,7 @@ public async IAsyncEnumerable StreamCompletionEnumerableAsync(Chat } else { - chatResponse.CopyFrom(partialResponse); + chatResponse.AppendFrom(partialResponse); } 
yield return partialResponse; @@ -196,7 +163,7 @@ public async IAsyncEnumerable StreamCompletionEnumerableAsync(Chat await responseStream.WriteAsync("]"u8.ToArray(), cancellationToken); } - await response.CheckResponseAsync(EnableDebug, jsonContent, responseStream, cancellationToken).ConfigureAwait(false); + await response.CheckResponseAsync(EnableDebug, payload, responseStream, null, cancellationToken).ConfigureAwait(false); if (chatResponse == null) { yield break; } diff --git a/OpenAI-DotNet/Chat/ChatRequest.cs b/OpenAI-DotNet/Chat/ChatRequest.cs index 0ae121b7..20b2df67 100644 --- a/OpenAI-DotNet/Chat/ChatRequest.cs +++ b/OpenAI-DotNet/Chat/ChatRequest.cs @@ -1,5 +1,6 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. +using OpenAI.Extensions; using System; using System.Collections.Generic; using System.Linq; @@ -21,14 +22,15 @@ public ChatRequest( int? maxTokens = null, int? number = null, double? presencePenalty = null, - ChatResponseFormat responseFormat = ChatResponseFormat.Text, + ChatResponseFormat responseFormat = ChatResponseFormat.Auto, int? seed = null, string[] stops = null, double? temperature = null, double? topP = null, int? topLogProbs = null, + bool? parallelToolCalls = null, string user = null) - : this(messages, model, frequencyPenalty, logitBias, maxTokens, number, presencePenalty, responseFormat, seed, stops, temperature, topP, topLogProbs, user) + : this(messages, model, frequencyPenalty, logitBias, maxTokens, number, presencePenalty, responseFormat, seed, stops, temperature, topP, topLogProbs, parallelToolCalls, user) { var toolList = tools?.ToList(); @@ -41,6 +43,7 @@ public ChatRequest( else { if (!toolChoice.Equals("none") && + !toolChoice.Equals("required") && !toolChoice.Equals("auto")) { var tool = toolList.FirstOrDefault(t => t.Function.Name.Contains(toolChoice)) ?? 
@@ -122,6 +125,9 @@ public ChatRequest( /// An integer between 0 and 5 specifying the number of most likely tokens to return at each token position, /// each with an associated log probability. /// + /// + /// Whether to enable parallel function calling during tool use. + /// /// /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. /// @@ -133,12 +139,13 @@ public ChatRequest( int? maxTokens = null, int? number = null, double? presencePenalty = null, - ChatResponseFormat responseFormat = ChatResponseFormat.Text, + ChatResponseFormat responseFormat = ChatResponseFormat.Auto, int? seed = null, string[] stops = null, double? temperature = null, double? topP = null, int? topLogProbs = null, + bool? parallelToolCalls = null, string user = null) { Messages = messages?.ToList(); @@ -148,19 +155,20 @@ public ChatRequest( throw new ArgumentNullException(nameof(messages), $"Missing required {nameof(messages)} parameter"); } - Model = string.IsNullOrWhiteSpace(model) ? Models.Model.GPT3_5_Turbo : model; + Model = string.IsNullOrWhiteSpace(model) ? Models.Model.GPT4o : model; FrequencyPenalty = frequencyPenalty; LogitBias = logitBias; MaxTokens = maxTokens; Number = number; PresencePenalty = presencePenalty; - ResponseFormat = ChatResponseFormat.Json == responseFormat ? responseFormat : null; + ResponseFormat = responseFormat; Seed = seed; Stops = stops; Temperature = temperature; TopP = topP; LogProbs = topLogProbs.HasValue ? topLogProbs.Value > 0 : null; TopLogProbs = topLogProbs; + ParallelToolCalls = parallelToolCalls; User = user; } @@ -255,8 +263,9 @@ public ChatRequest( /// which indicates the generation exceeded max_tokens or the conversation exceeded the max context length. 
/// [JsonPropertyName("response_format")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public ResponseFormat ResponseFormat { get; } + [JsonConverter(typeof(ResponseFormatConverter))] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ChatResponseFormat ResponseFormat { get; } /// /// This feature is in Beta. If specified, our system will make a best effort to sample deterministically, @@ -282,6 +291,10 @@ public ChatRequest( [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public bool Stream { get; internal set; } + [JsonPropertyName("stream_options")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public StreamOptions StreamOptions { get; internal set; } + /// /// What sampling temperature to use, between 0 and 2. /// Higher values like 0.8 will make the output more random, while lower values like 0.2 will @@ -322,27 +335,16 @@ public ChatRequest( public dynamic ToolChoice { get; } /// - /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. + /// Whether to enable parallel function calling during tool use. /// - [JsonPropertyName("user")] - public string User { get; } + [JsonPropertyName("parallel_tool_calls")] + public bool? ParallelToolCalls { get; } /// - /// Pass "auto" to let the OpenAI service decide, "none" if none are to be called, - /// or "functionName" to force function call. Defaults to "auto". - /// - [Obsolete("Use ToolChoice")] - [JsonPropertyName("function_call")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public dynamic FunctionCall { get; } - - /// - /// An optional list of functions to get arguments for. + /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. 
/// - [Obsolete("Use Tools")] - [JsonPropertyName("functions")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public IReadOnlyList Functions { get; } + [JsonPropertyName("user")] + public string User { get; } /// public override string ToString() => JsonSerializer.Serialize(this, OpenAIClient.JsonSerializationOptions); diff --git a/OpenAI-DotNet/Chat/ChatResponse.cs b/OpenAI-DotNet/Chat/ChatResponse.cs index 4491ee00..f26a71da 100644 --- a/OpenAI-DotNet/Chat/ChatResponse.cs +++ b/OpenAI-DotNet/Chat/ChatResponse.cs @@ -1,5 +1,6 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. +using OpenAI.Extensions; using System; using System.Collections.Generic; using System.Linq; @@ -7,11 +8,11 @@ namespace OpenAI.Chat { - public sealed class ChatResponse : BaseResponse + public sealed class ChatResponse : BaseResponse, IServerSentEvent { public ChatResponse() { } - internal ChatResponse(ChatResponse other) => CopyFrom(other); + internal ChatResponse(ChatResponse other) => AppendFrom(other); /// /// A unique identifier for the chat completion. @@ -24,9 +25,6 @@ public ChatResponse() { } [JsonPropertyName("object")] public string Object { get; private set; } - [Obsolete("Use CreatedAtUnixTimeSeconds")] - public int Created => CreatedAtUnixTimeSeconds; - /// /// The Unix timestamp (in seconds) of when the chat completion was created. 
/// @@ -65,7 +63,7 @@ public ChatResponse() { } public IReadOnlyList Choices { get => choices; - private set => choices = value.ToList(); + private set => choices = value?.ToList(); } [JsonIgnore] @@ -75,24 +73,26 @@ public IReadOnlyList Choices public static implicit operator string(ChatResponse response) => response?.ToString(); - internal void CopyFrom(ChatResponse other) + internal void AppendFrom(ChatResponse other) { - if (!string.IsNullOrWhiteSpace(other?.Id)) + if (other is null) { return; } + + if (!string.IsNullOrWhiteSpace(other.Id)) { Id = other.Id; } - if (!string.IsNullOrWhiteSpace(other?.Object)) + if (!string.IsNullOrWhiteSpace(other.Object)) { Object = other.Object; } - if (!string.IsNullOrWhiteSpace(other?.Model)) + if (!string.IsNullOrWhiteSpace(other.Model)) { Model = other.Model; } - if (other?.Usage != null) + if (other.Usage != null) { if (Usage == null) { @@ -100,28 +100,21 @@ internal void CopyFrom(ChatResponse other) } else { - Usage.CopyFrom(other.Usage); + Usage.AppendFrom(other.Usage); } } - if (other?.Choices is { Count: > 0 }) + if (other.Choices is { Count: > 0 }) { choices ??= new List(); - - foreach (var otherChoice in other.Choices) - { - if (otherChoice.Index + 1 > choices.Count) - { - choices.Insert(otherChoice.Index, otherChoice); - } - - choices[otherChoice.Index].CopyFrom(otherChoice); - } + choices.AppendFrom(other.Choices); } } public string GetUsage(bool log = true) { + if (Usage == null) { return string.Empty; } + var message = $"{Id} | {Model} | {Usage}"; if (log) diff --git a/OpenAI-DotNet/Chat/Choice.cs b/OpenAI-DotNet/Chat/Choice.cs index d2b20486..fbda2243 100644 --- a/OpenAI-DotNet/Chat/Choice.cs +++ b/OpenAI-DotNet/Chat/Choice.cs @@ -1,10 +1,11 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. 
+using OpenAI.Extensions; using System.Text.Json.Serialization; namespace OpenAI.Chat { - public sealed class Choice + public sealed class Choice : IAppendable { /// /// A chat completion message generated by the model. @@ -42,7 +43,7 @@ public sealed class Choice /// [JsonInclude] [JsonPropertyName("index")] - public int Index { get; private set; } + public int? Index { get; private set; } /// /// Log probability information for the choice. @@ -55,7 +56,7 @@ public sealed class Choice public static implicit operator string(Choice choice) => choice?.ToString(); - internal void CopyFrom(Choice other) + public void AppendFrom(Choice other) { Index = other?.Index ?? 0; @@ -72,7 +73,7 @@ internal void CopyFrom(Choice other) } else { - Message.CopyFrom(other.Delta); + Message.AppendFrom(other.Delta); } } diff --git a/OpenAI-DotNet/Chat/Content.cs b/OpenAI-DotNet/Chat/Content.cs deleted file mode 100644 index 431e8dee..00000000 --- a/OpenAI-DotNet/Chat/Content.cs +++ /dev/null @@ -1,58 +0,0 @@ -// Licensed under the MIT License. See LICENSE in the project root for license information. 
- -using System.Text.Json.Serialization; -using OpenAI.Extensions; - -namespace OpenAI.Chat -{ - public sealed class Content - { - public Content() { } - - public Content(ImageUrl imageUrl) - { - Type = ContentType.ImageUrl; - ImageUrl = imageUrl; - } - - public Content(string input) - { - Type = ContentType.Text; - Text = input; - } - - public Content(ContentType type, string input) - { - Type = type; - - switch (Type) - { - case ContentType.Text: - Text = input; - break; - case ContentType.ImageUrl: - ImageUrl = new ImageUrl(input); - break; - } - } - - [JsonInclude] - [JsonPropertyName("type")] - [JsonConverter(typeof(JsonStringEnumConverter))] - public ContentType Type { get; private set; } - - [JsonInclude] - [JsonPropertyName("text")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public string Text { get; private set; } - - [JsonInclude] - [JsonPropertyName("image_url")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public ImageUrl ImageUrl { get; private set; } - - public static implicit operator Content(string input) => new(ContentType.Text, input); - - public static implicit operator Content(ImageUrl imageUrl) => new(imageUrl); - } -} \ No newline at end of file diff --git a/OpenAI-DotNet/Chat/Conversation.cs b/OpenAI-DotNet/Chat/Conversation.cs index b228c481..bf03f14b 100644 --- a/OpenAI-DotNet/Chat/Conversation.cs +++ b/OpenAI-DotNet/Chat/Conversation.cs @@ -29,4 +29,4 @@ public Conversation(List messages) public static implicit operator string(Conversation conversation) => conversation?.ToString(); } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Chat/FinishDetails.cs b/OpenAI-DotNet/Chat/FinishDetails.cs index b917237e..b41d1f25 100644 --- a/OpenAI-DotNet/Chat/FinishDetails.cs +++ b/OpenAI-DotNet/Chat/FinishDetails.cs @@ -14,4 +14,4 @@ public sealed class FinishDetails public static implicit operator string(FinishDetails details) => details?.ToString(); } -} \ No newline at end of file +} 
diff --git a/OpenAI-DotNet/Chat/LogProbInfo.cs b/OpenAI-DotNet/Chat/LogProbInfo.cs index 87a9d41e..0d8acd96 100644 --- a/OpenAI-DotNet/Chat/LogProbInfo.cs +++ b/OpenAI-DotNet/Chat/LogProbInfo.cs @@ -42,4 +42,4 @@ public sealed class LogProbInfo [JsonPropertyName("top_logprobs")] public IReadOnlyList TopLogProbs { get; private set; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Chat/LogProbs.cs b/OpenAI-DotNet/Chat/LogProbs.cs index 855c20aa..266dcbab 100644 --- a/OpenAI-DotNet/Chat/LogProbs.cs +++ b/OpenAI-DotNet/Chat/LogProbs.cs @@ -17,4 +17,4 @@ public sealed class LogProbs [JsonPropertyName("content")] public IReadOnlyList Content { get; private set; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Chat/Message.cs b/OpenAI-DotNet/Chat/Message.cs index 0917eb98..c135520b 100644 --- a/OpenAI-DotNet/Chat/Message.cs +++ b/OpenAI-DotNet/Chat/Message.cs @@ -1,5 +1,6 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. +using OpenAI.Extensions; using System; using System.Collections.Generic; using System.Linq; @@ -9,17 +10,9 @@ namespace OpenAI.Chat { public sealed class Message { - internal Message(Delta other) => CopyFrom(other); - public Message() { } - [Obsolete("Use new constructor args")] - public Message(Role role, string content, string name, Function function) - : this(role, content, name) - { - Name = name; - Function = function; - } + internal Message(Delta other) => AppendFrom(other); /// /// Creates a new message to insert into a chat conversation. @@ -34,7 +27,7 @@ public Message(Role role, string content, string name, Function function) public Message(Role role, IEnumerable content, string name = null) { Role = role; - Content = content.ToList(); + Content = content?.ToList(); Name = name; } @@ -73,6 +66,27 @@ public Message(Tool tool, IEnumerable content) ToolCallId = tool.Id; } + /// + /// Creates a new message to insert into a chat conversation. 
+ /// + /// The tool_call_id to use for the message. + /// Name of the function call. + /// Tool function response. + public Message(string toolCallId, string toolFunctionName, IEnumerable content) + : this(Role.Tool, content, toolFunctionName) + { + ToolCallId = toolCallId; + } + + /// + /// Optional, The name of the author of this message.
+ /// May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters. + ///
+ [JsonInclude] + [JsonPropertyName("name")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public string Name { get; private set; } + /// /// The of the author of this message. /// @@ -99,7 +113,7 @@ public Message(Tool tool, IEnumerable content) public IReadOnlyList ToolCalls { get => toolCalls; - private set => toolCalls = value.ToList(); + private set => toolCalls = value?.ToList(); } [JsonInclude] @@ -116,20 +130,11 @@ public IReadOnlyList ToolCalls [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public Function Function { get; private set; } - /// - /// Optional, The name of the author of this message.
- /// May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters. - ///
- [JsonInclude] - [JsonPropertyName("name")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public string Name { get; private set; } - public override string ToString() => Content?.ToString() ?? string.Empty; public static implicit operator string(Message message) => message?.ToString(); - internal void CopyFrom(Delta other) + internal void AppendFrom(Delta other) { if (Role == 0 && other?.Role > 0) @@ -150,40 +155,8 @@ internal void CopyFrom(Delta other) if (other is { ToolCalls: not null }) { toolCalls ??= new List(); - - foreach (var otherToolCall in other.ToolCalls) - { - if (otherToolCall == null) { continue; } - - if (otherToolCall.Index.HasValue) - { - if (otherToolCall.Index + 1 > toolCalls.Count) - { - toolCalls.Insert(otherToolCall.Index.Value, new Tool(otherToolCall)); - } - - toolCalls[otherToolCall.Index.Value].CopyFrom(otherToolCall); - } - else - { - toolCalls.Add(new Tool(otherToolCall)); - } - } - } - -#pragma warning disable CS0618 // Type or member is obsolete - if (other?.Function != null) - { - if (Function == null) - { - Function = new Function(other.Function); - } - else - { - Function.CopyFrom(other.Function); - } + toolCalls.AppendFrom(other.ToolCalls); } -#pragma warning restore CS0618 // Type or member is obsolete } } } diff --git a/OpenAI-DotNet/Chat/ResponseFormat.cs b/OpenAI-DotNet/Chat/ResponseFormat.cs deleted file mode 100644 index aac7d158..00000000 --- a/OpenAI-DotNet/Chat/ResponseFormat.cs +++ /dev/null @@ -1,23 +0,0 @@ -// Licensed under the MIT License. See LICENSE in the project root for license information. 
- -using System.Text.Json.Serialization; -using OpenAI.Extensions; - -namespace OpenAI.Chat -{ - public sealed class ResponseFormat - { - public ResponseFormat() => Type = ChatResponseFormat.Text; - - public ResponseFormat(ChatResponseFormat format) => Type = format; - - [JsonInclude] - [JsonPropertyName("type")] - [JsonConverter(typeof(JsonStringEnumConverter))] - public ChatResponseFormat Type { get; private set; } - - public static implicit operator ChatResponseFormat(ResponseFormat format) => format.Type; - - public static implicit operator ResponseFormat(ChatResponseFormat format) => new(format); - } -} \ No newline at end of file diff --git a/OpenAI-DotNet/Threads/Annotation.cs b/OpenAI-DotNet/Common/Annotation.cs similarity index 63% rename from OpenAI-DotNet/Threads/Annotation.cs rename to OpenAI-DotNet/Common/Annotation.cs index 586416d4..391b2b7a 100644 --- a/OpenAI-DotNet/Threads/Annotation.cs +++ b/OpenAI-DotNet/Common/Annotation.cs @@ -3,10 +3,14 @@ using OpenAI.Extensions; using System.Text.Json.Serialization; -namespace OpenAI.Threads +namespace OpenAI { - public sealed class Annotation + public sealed class Annotation : IAppendable { + [JsonInclude] + [JsonPropertyName("index")] + public int? 
Index { get; private set; } + [JsonInclude] [JsonPropertyName("type")] [JsonConverter(typeof(JsonStringEnumConverter))] @@ -42,5 +46,35 @@ public sealed class Annotation [JsonInclude] [JsonPropertyName("end_index")] public int EndIndex { get; private set; } + + public void AppendFrom(Annotation other) + { + if (other == null) { return; } + + if (!string.IsNullOrWhiteSpace(other.Text)) + { + Text += other.Text; + } + + if (other.FileCitation != null) + { + FileCitation = other.FileCitation; + } + + if (other.FilePath != null) + { + FilePath = other.FilePath; + } + + if (other.StartIndex > 0) + { + StartIndex = other.StartIndex; + } + + if (other.EndIndex > 0) + { + EndIndex = other.EndIndex; + } + } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Common/AnnotationType.cs b/OpenAI-DotNet/Common/AnnotationType.cs index e767c346..c672f90e 100644 --- a/OpenAI-DotNet/Common/AnnotationType.cs +++ b/OpenAI-DotNet/Common/AnnotationType.cs @@ -11,4 +11,4 @@ public enum AnnotationType [EnumMember(Value = "file_path")] FilePath } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Common/BaseResponse.cs b/OpenAI-DotNet/Common/BaseResponse.cs index 18509460..243dda07 100644 --- a/OpenAI-DotNet/Common/BaseResponse.cs +++ b/OpenAI-DotNet/Common/BaseResponse.cs @@ -1,5 +1,6 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. +using OpenAI.Extensions; using System; using System.Text.Json.Serialization; @@ -40,7 +41,6 @@ public abstract class BaseResponse /// /// The maximum number of requests that are permitted before exhausting the rate limit. /// - [JsonIgnore] public int? LimitRequests { get; internal set; } @@ -73,5 +73,8 @@ public abstract class BaseResponse ///
[JsonIgnore] public string ResetTokens { get; internal set; } + + public string ToJsonString() + => this.ToEscapedJsonString(); } } diff --git a/OpenAI-DotNet/Chat/ChatResponseFormat.cs b/OpenAI-DotNet/Common/ChatResponseFormat.cs similarity index 88% rename from OpenAI-DotNet/Chat/ChatResponseFormat.cs rename to OpenAI-DotNet/Common/ChatResponseFormat.cs index 28ab467c..c9294946 100644 --- a/OpenAI-DotNet/Chat/ChatResponseFormat.cs +++ b/OpenAI-DotNet/Common/ChatResponseFormat.cs @@ -2,13 +2,14 @@ using System.Runtime.Serialization; -namespace OpenAI.Chat +namespace OpenAI { public enum ChatResponseFormat { + Auto = 0, [EnumMember(Value = "text")] Text, [EnumMember(Value = "json_object")] Json } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Common/CodeInterpreterResources.cs b/OpenAI-DotNet/Common/CodeInterpreterResources.cs new file mode 100644 index 00000000..91771cc2 --- /dev/null +++ b/OpenAI-DotNet/Common/CodeInterpreterResources.cs @@ -0,0 +1,35 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace OpenAI +{ + /// + /// resources. + /// + public sealed class CodeInterpreterResources + { + /// + /// Constructor. + /// + /// + /// A list of file IDs made available to the tool. + /// There can be a maximum of 20 files associated with the tool. + /// + public CodeInterpreterResources(IReadOnlyList fileIds) + { + FileIds = fileIds; + } + + /// + /// A list of file IDs made available to the tool. + /// There can be a maximum of 20 files associated with the tool. 
+ /// + [JsonInclude] + [JsonPropertyName("file_ids")] + public IReadOnlyList FileIds { get; private set; } + + public static implicit operator CodeInterpreterResources(List fileIds) => new(fileIds); + } +} diff --git a/OpenAI-DotNet/Common/Content.cs b/OpenAI-DotNet/Common/Content.cs new file mode 100644 index 00000000..1e71fb6e --- /dev/null +++ b/OpenAI-DotNet/Common/Content.cs @@ -0,0 +1,150 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using OpenAI.Extensions; +using System; +using System.Text.Json.Serialization; + +namespace OpenAI +{ + public sealed class Content : IAppendable + { + public Content() { } + + public Content(string text) + : this(ContentType.Text, text) + { + } + + public Content(TextContent textContent) + { + Type = ContentType.Text; + Text = textContent; + } + + public Content(ImageUrl imageUrl) + { + Type = ContentType.ImageUrl; + ImageUrl = imageUrl; + } + + public Content(ImageFile imageFile) + { + Type = ContentType.ImageFile; + ImageFile = imageFile; + } + + public Content(ContentType type, string input) + { + Type = type; + + switch (Type) + { + case ContentType.Text: + Text = input; + break; + case ContentType.ImageUrl: + ImageUrl = new ImageUrl(input); + break; + case ContentType.ImageFile: + throw new ArgumentException("Use the ImageFile constructor for ImageFile content."); + default: + throw new ArgumentOutOfRangeException(nameof(type)); + } + } + + [JsonInclude] + [JsonPropertyName("index")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public int? 
Index { get; } + + [JsonInclude] + [JsonPropertyName("type")] + [JsonConverter(typeof(JsonStringEnumConverter))] + [JsonIgnore(Condition = JsonIgnoreCondition.Never)] + public ContentType Type { get; private set; } + + [JsonInclude] + [JsonPropertyName("text")] + [JsonConverter(typeof(StringOrObjectConverter))] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public object Text { get; private set; } + + [JsonInclude] + [JsonPropertyName("image_url")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImageUrl ImageUrl { get; private set; } + + [JsonInclude] + [JsonPropertyName("image_file")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImageFile ImageFile { get; private set; } + + public static implicit operator Content(string input) => new(ContentType.Text, input); + + public static implicit operator Content(ImageUrl imageUrl) => new(imageUrl); + + public static implicit operator Content(ImageFile imageFile) => new(imageFile); + + public override string ToString() + => Type switch + { + ContentType.Text => Text?.ToString(), + ContentType.ImageUrl => ImageUrl?.ToString(), + ContentType.ImageFile => ImageFile?.ToString(), + _ => throw new ArgumentOutOfRangeException(nameof(Type)) + } ?? 
string.Empty; + + public void AppendFrom(Content other) + { + if (other == null) { return; } + + if (other.Type > 0) + { + Type = other.Type; + } + + if (other.Text is TextContent otherTextContent) + { + if (Text is TextContent textContent) + { + textContent.AppendFrom(otherTextContent); + } + else + { + Text = otherTextContent; + } + } + else if (other.Text is string otherStringContent) + { + if (!string.IsNullOrWhiteSpace(otherStringContent)) + { + Text += otherStringContent; + } + } + + if (other.ImageUrl != null) + { + if (ImageUrl == null) + { + ImageUrl = other.ImageUrl; + } + else + { + ImageUrl.AppendFrom(other.ImageUrl); + } + } + + if (other.ImageFile != null) + { + if (ImageFile == null) + { + ImageFile = other.ImageFile; + } + else + { + ImageFile.AppendFrom(other.ImageFile); + } + } + } + } +} diff --git a/OpenAI-DotNet/Common/ContentType.cs b/OpenAI-DotNet/Common/ContentType.cs index 5129d303..be2427f0 100644 --- a/OpenAI-DotNet/Common/ContentType.cs +++ b/OpenAI-DotNet/Common/ContentType.cs @@ -13,4 +13,4 @@ public enum ContentType [EnumMember(Value = "image_file")] ImageFile } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Common/DeletedResponse.cs b/OpenAI-DotNet/Common/DeletedResponse.cs index 01558379..2ed4b4bf 100644 --- a/OpenAI-DotNet/Common/DeletedResponse.cs +++ b/OpenAI-DotNet/Common/DeletedResponse.cs @@ -4,7 +4,7 @@ namespace OpenAI { - internal sealed class DeletedResponse + internal sealed class DeletedResponse : BaseResponse { [JsonInclude] [JsonPropertyName("id")] @@ -18,4 +18,4 @@ internal sealed class DeletedResponse [JsonPropertyName("deleted")] public bool Deleted { get; private set; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Common/Error.cs b/OpenAI-DotNet/Common/Error.cs index d6664633..b6eabec1 100644 --- a/OpenAI-DotNet/Common/Error.cs +++ b/OpenAI-DotNet/Common/Error.cs @@ -1,23 +1,81 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. 
+using System.Text; using System.Text.Json.Serialization; namespace OpenAI { - public sealed class Error + public sealed class Error : BaseResponse, IServerSentEvent { /// - /// One of server_error or rate_limit_exceeded. + /// An error code identifying the error type. /// [JsonInclude] [JsonPropertyName("code")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public string Code { get; private set; } /// - /// A human-readable description of the error. + /// A human-readable message providing more details about the error. /// [JsonInclude] [JsonPropertyName("message")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public string Message { get; private set; } + + /// + /// The name of the parameter that caused the error, if applicable. + /// + [JsonInclude] + [JsonPropertyName("param")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public string Parameter { get; private set; } + + /// + /// The type. + /// + [JsonInclude] + [JsonPropertyName("type")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public string Type { get; private set; } + + /// + /// The line number of the input file where the error occurred, if applicable. + /// + [JsonInclude] + [JsonPropertyName("line")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public int? 
Line { get; private set; } + + [JsonIgnore] + public string Object => "error"; + + public override string ToString() + { + var builder = new StringBuilder(); + builder.Append($"[{Code}]"); + + if (!string.IsNullOrEmpty(Message)) + { + builder.Append($" {Message}"); + } + + if (!string.IsNullOrEmpty(Type)) + { + builder.Append($" Type: {Type}"); + } + + if (!string.IsNullOrEmpty(Parameter)) + { + builder.Append($" Parameter: {Parameter}"); + } + + if (Line.HasValue) + { + builder.Append($" Line: {Line.Value}"); + } + + return builder.ToString(); + } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Common/Event.cs b/OpenAI-DotNet/Common/Event.cs deleted file mode 100644 index 049dddb4..00000000 --- a/OpenAI-DotNet/Common/Event.cs +++ /dev/null @@ -1,35 +0,0 @@ -// Licensed under the MIT License. See LICENSE in the project root for license information. - -using System; -using System.Text.Json.Serialization; - -namespace OpenAI -{ - [Obsolete("use EventResponse")] - public sealed class Event : BaseResponse - { - [JsonInclude] - [JsonPropertyName("object")] - public string Object { get; private set; } - - [JsonInclude] - [JsonPropertyName("created_at")] - public int CreatedAtUnixTimeSeconds { get; private set; } - - [Obsolete("use CreatedAtUnixTimeSeconds")] - public int CreatedAtUnixTime => CreatedAtUnixTimeSeconds; - - [JsonIgnore] - public DateTime CreatedAt => DateTimeOffset.FromUnixTimeSeconds(CreatedAtUnixTimeSeconds).DateTime; - - [JsonInclude] - [JsonPropertyName("level")] - public string Level { get; private set; } - - [JsonInclude] - [JsonPropertyName("message")] - public string Message { get; private set; } - - public static implicit operator EventResponse(Event @event) => new(@event); - } -} diff --git a/OpenAI-DotNet/Common/EventResponse.cs b/OpenAI-DotNet/Common/EventResponse.cs index dd693593..8a1c159b 100644 --- a/OpenAI-DotNet/Common/EventResponse.cs +++ b/OpenAI-DotNet/Common/EventResponse.cs @@ -9,16 +9,6 @@ public sealed class 
EventResponse : BaseResponse { public EventResponse() { } -#pragma warning disable CS0618 // Type or member is obsolete - internal EventResponse(Event @event) - { - Object = @event.Object; - CreatedAtUnixTimeSeconds = @event.CreatedAtUnixTimeSeconds; - Level = @event.Level; - Message = @event.Message; - } -#pragma warning restore CS0618 // Type or member is obsolete - [JsonInclude] [JsonPropertyName("object")] public string Object { get; private set; } @@ -38,4 +28,4 @@ internal EventResponse(Event @event) [JsonPropertyName("message")] public string Message { get; private set; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/FileCitation.cs b/OpenAI-DotNet/Common/FileCitation.cs similarity index 96% rename from OpenAI-DotNet/Threads/FileCitation.cs rename to OpenAI-DotNet/Common/FileCitation.cs index 80c74543..413421b3 100644 --- a/OpenAI-DotNet/Threads/FileCitation.cs +++ b/OpenAI-DotNet/Common/FileCitation.cs @@ -2,7 +2,7 @@ using System.Text.Json.Serialization; -namespace OpenAI.Threads +namespace OpenAI { public sealed class FileCitation { diff --git a/OpenAI-DotNet/Threads/FilePath.cs b/OpenAI-DotNet/Common/FilePath.cs similarity index 93% rename from OpenAI-DotNet/Threads/FilePath.cs rename to OpenAI-DotNet/Common/FilePath.cs index 2d476156..20050700 100644 --- a/OpenAI-DotNet/Threads/FilePath.cs +++ b/OpenAI-DotNet/Common/FilePath.cs @@ -2,7 +2,7 @@ using System.Text.Json.Serialization; -namespace OpenAI.Threads +namespace OpenAI { public sealed class FilePath { @@ -13,4 +13,4 @@ public sealed class FilePath [JsonPropertyName("file_id")] public string FileId { get; private set; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Common/FileSearchResources.cs b/OpenAI-DotNet/Common/FileSearchResources.cs new file mode 100644 index 00000000..f3da1460 --- /dev/null +++ b/OpenAI-DotNet/Common/FileSearchResources.cs @@ -0,0 +1,47 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. 
+ +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace OpenAI +{ + /// + /// resources. + /// + public sealed class FileSearchResources + { + public FileSearchResources() { } + + /// + /// The vector store attached to this assistant/thread. + /// There can be a maximum of 1 vector store attached to the assistant/thread. + /// + /// + public FileSearchResources(string vectorStoreId = null) + { + VectorStoreIds = new List { vectorStoreId }; + } + + /// + /// A helper to create a vector store with file_ids and attach it to an assistant/thread. + /// There can be a maximum of 1 vector store attached to the assistant/thread. + /// + /// . + public FileSearchResources(VectorStoreRequest vectorStore = null) + { + VectorStores = new List { vectorStore }; + } + + [JsonInclude] + [JsonPropertyName("vector_store_ids")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyList VectorStoreIds { get; private set; } + + [JsonInclude] + [JsonPropertyName("vector_stores")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyList VectorStores { get; private set; } + + public static implicit operator FileSearchResources(VectorStoreRequest request) => new(request); + } +} diff --git a/OpenAI-DotNet/Common/Function.cs b/OpenAI-DotNet/Common/Function.cs index cec092ee..613b7ace 100644 --- a/OpenAI-DotNet/Common/Function.cs +++ b/OpenAI-DotNet/Common/Function.cs @@ -8,6 +8,7 @@ using System.Text.Json; using System.Text.Json.Nodes; using System.Text.Json.Serialization; +using System.Text.RegularExpressions; using System.Threading; using System.Threading.Tasks; @@ -37,20 +38,14 @@ public Function() { } /// public Function(string name, string description = null, JsonNode parameters = null) { - if (!System.Text.RegularExpressions.Regex.IsMatch(name, NameRegex)) + if (!Regex.IsMatch(name, NameRegex)) { throw new ArgumentException($"The name of the function does not conform to naming standards: 
{NameRegex}"); } - if (functionCache.ContainsKey(name)) - { - throw new ArgumentException($"The function \"{name}\" is already registered."); - } - Name = name; Description = description; Parameters = parameters; - functionCache[Name] = this; } /// @@ -68,25 +63,19 @@ public Function(string name, string description = null, JsonNode parameters = nu /// public Function(string name, string description, string parameters) { - if (!System.Text.RegularExpressions.Regex.IsMatch(name, NameRegex)) + if (!Regex.IsMatch(name, NameRegex)) { throw new ArgumentException($"The name of the function does not conform to naming standards: {NameRegex}"); } - if (functionCache.ContainsKey(name)) - { - throw new ArgumentException($"The function \"{name}\" is already registered."); - } - Name = name; Description = description; Parameters = JsonNode.Parse(parameters); - functionCache[Name] = this; } - internal Function(string name, string description, MethodInfo method, object instance = null) + private Function(string name, string description, MethodInfo method, object instance = null) { - if (!System.Text.RegularExpressions.Regex.IsMatch(name, NameRegex)) + if (!Regex.IsMatch(name, NameRegex)) { throw new ArgumentException($"The name of the function does not conform to naming standards: {NameRegex}"); } @@ -112,44 +101,44 @@ internal static Function GetOrCreateFunction(string name, string description, Me #region Func<,> Overloads public static Function FromFunc(string name, Func function, string description = null) - => new(name, description, function.Method, function.Target); + => GetOrCreateFunction(name, description, function.Method, function.Target); public static Function FromFunc(string name, Func function, string description = null) - => new(name, description, function.Method, function.Target); + => GetOrCreateFunction(name, description, function.Method, function.Target); public static Function FromFunc(string name, Func function, string description = null) - => new(name, 
description, function.Method, function.Target); + => GetOrCreateFunction(name, description, function.Method, function.Target); public static Function FromFunc(string name, Func function, string description = null) - => new(name, description, function.Method, function.Target); + => GetOrCreateFunction(name, description, function.Method, function.Target); public static Function FromFunc(string name, Func function, string description = null) - => new(name, description, function.Method, function.Target); + => GetOrCreateFunction(name, description, function.Method, function.Target); public static Function FromFunc(string name, Func function, string description = null) - => new(name, description, function.Method, function.Target); + => GetOrCreateFunction(name, description, function.Method, function.Target); public static Function FromFunc(string name, Func function, string description = null) - => new(name, description, function.Method, function.Target); + => GetOrCreateFunction(name, description, function.Method, function.Target); public static Function FromFunc(string name, Func function, string description = null) - => new(name, description, function.Method, function.Target); + => GetOrCreateFunction(name, description, function.Method, function.Target); public static Function FromFunc(string name, Func function, string description = null) - => new(name, description, function.Method, function.Target); + => GetOrCreateFunction(name, description, function.Method, function.Target); public static Function FromFunc(string name, Func function, string description = null) - => new(name, description, function.Method, function.Target); + => GetOrCreateFunction(name, description, function.Method, function.Target); public static Function FromFunc(string name, Func function, string description = null) - => new(name, description, function.Method, function.Target); + => GetOrCreateFunction(name, description, function.Method, function.Target); public static Function FromFunc(string 
name, Func function, string description = null) - => new(name, description, function.Method, function.Target); + => GetOrCreateFunction(name, description, function.Method, function.Target); #endregion Func<,> Overloads - internal Function(Function other) => CopyFrom(other); + internal Function(Function other) => AppendFrom(other); /// /// The name of the function to generate arguments for.
@@ -228,7 +217,7 @@ public JsonNode Arguments [JsonIgnore] private MethodInfo MethodInfo { get; } - internal void CopyFrom(Function other) + internal void AppendFrom(Function other) { if (!string.IsNullOrWhiteSpace(other.Name)) { @@ -257,6 +246,8 @@ internal void CopyFrom(Function other) internal static void ClearFunctionCache() => functionCache.Clear(); + internal static bool TryRemoveFunction(string name) => functionCache.TryRemove(name, out _); + /// /// Invokes the function and returns the result as json. /// @@ -379,7 +370,7 @@ public async Task InvokeAsync(CancellationToken cancellationToken = defaul if (function.MethodInfo == null) { - throw new InvalidOperationException($"Failed to find a valid method for {Name}"); + throw new InvalidOperationException($"Failed to find a valid method to invoke for {Name}"); } var requestedArgs = arguments != null diff --git a/OpenAI-DotNet/Common/FunctionAttribute.cs b/OpenAI-DotNet/Common/FunctionAttribute.cs index 9e2bbe73..f56a3418 100644 --- a/OpenAI-DotNet/Common/FunctionAttribute.cs +++ b/OpenAI-DotNet/Common/FunctionAttribute.cs @@ -14,4 +14,4 @@ public FunctionAttribute(string description = null) public string Description { get; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Common/FunctionParameterAttribute.cs b/OpenAI-DotNet/Common/FunctionParameterAttribute.cs index dcbb8be6..cf6234da 100644 --- a/OpenAI-DotNet/Common/FunctionParameterAttribute.cs +++ b/OpenAI-DotNet/Common/FunctionParameterAttribute.cs @@ -18,4 +18,4 @@ public FunctionParameterAttribute(string description) public string Description { get; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Common/IListResponse.cs b/OpenAI-DotNet/Common/IListResponse.cs index 2a9c1f1c..af103554 100644 --- a/OpenAI-DotNet/Common/IListResponse.cs +++ b/OpenAI-DotNet/Common/IListResponse.cs @@ -9,4 +9,4 @@ public interface IListResponse { IReadOnlyList Items { get; } } -} \ No newline at end of file +} diff --git 
a/OpenAI-DotNet/Common/IServerSentEvent.cs b/OpenAI-DotNet/Common/IServerSentEvent.cs new file mode 100644 index 00000000..e29d5962 --- /dev/null +++ b/OpenAI-DotNet/Common/IServerSentEvent.cs @@ -0,0 +1,14 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +namespace OpenAI +{ + /// + /// Common interface for streaming server sent events + /// + public interface IServerSentEvent + { + string Object { get; } + + string ToJsonString(); + } +} diff --git a/OpenAI-DotNet/Common/ImageDetail.cs b/OpenAI-DotNet/Common/ImageDetail.cs index 91c0e2f1..d07031f1 100644 --- a/OpenAI-DotNet/Common/ImageDetail.cs +++ b/OpenAI-DotNet/Common/ImageDetail.cs @@ -13,4 +13,4 @@ public enum ImageDetail [EnumMember(Value = "high")] High } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Common/ImageFile.cs b/OpenAI-DotNet/Common/ImageFile.cs index 6a4a0ac8..6da5f9e6 100644 --- a/OpenAI-DotNet/Common/ImageFile.cs +++ b/OpenAI-DotNet/Common/ImageFile.cs @@ -1,17 +1,70 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. +using OpenAI.Extensions; using System.Text.Json.Serialization; namespace OpenAI { - public sealed class ImageFile + /// + /// References an image file in the content of a message. + /// + public sealed class ImageFile : IAppendable { /// - /// The file ID of the image. + /// Constructor. + /// + /// + /// The file ID of the image in the message content. + /// Set purpose='vision' when uploading the file if you need to later display the file content. + /// + /// + /// Specifies the detail level of the image if specified by the user. + /// 'low' uses fewer tokens, you can opt in to high resolution using 'high'. + /// + public ImageFile(string fileId, ImageDetail detail = ImageDetail.Auto) + { + FileId = fileId; + Detail = detail; + } + + [JsonInclude] + [JsonPropertyName("index")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public int? 
Index { get; } + + /// + /// The file ID of the image in the message content. + /// Set purpose='vision' when uploading the file if you need to later display the file content. /// [JsonInclude] [JsonPropertyName("file_id")] [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public string FileId { get; private set; } + + /// + /// Specifies the detail level of the image if specified by the user. + /// 'low' uses fewer tokens, you can opt in to high resolution using 'high'. + /// + [JsonInclude] + [JsonPropertyName("detail")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImageDetail Detail { get; private set; } + + public override string ToString() => FileId; + + public void AppendFrom(ImageFile other) + { + if (other == null) { return; } + + if (!string.IsNullOrWhiteSpace(other.FileId)) + { + FileId = other.FileId; + } + + if (other.Detail > 0) + { + Detail = other.Detail; + } + } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Common/ImageUrl.cs b/OpenAI-DotNet/Common/ImageUrl.cs index c52fca39..be8722a7 100644 --- a/OpenAI-DotNet/Common/ImageUrl.cs +++ b/OpenAI-DotNet/Common/ImageUrl.cs @@ -1,26 +1,68 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. +using OpenAI.Extensions; using System.Text.Json.Serialization; namespace OpenAI { - public sealed class ImageUrl + /// + /// References an image URL in the content of a message. + /// + public sealed class ImageUrl : IAppendable { - [JsonConstructor] + /// + /// Constructor. + /// + /// + /// The external URL of the image, must be a supported image types: jpeg, jpg, png, gif, webp. + /// + /// + /// Specifies the detail level of the image if specified by the user. + /// 'low' uses fewer tokens, you can opt in to high resolution using 'high'. 
+ /// public ImageUrl(string url, ImageDetail detail = ImageDetail.Auto) { Url = url; Detail = detail; } + [JsonInclude] + [JsonPropertyName("index")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public int? Index { get; private set; } + + /// + /// The external URL of the image, must be a supported image types: jpeg, jpg, png, gif, webp. + /// [JsonInclude] [JsonPropertyName("url")] [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public string Url { get; private set; } + /// + /// Specifies the detail level of the image if specified by the user. + /// 'low' uses fewer tokens, you can opt in to high resolution using 'high'. + /// [JsonInclude] [JsonPropertyName("detail")] [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public ImageDetail Detail { get; private set; } + + public override string ToString() => Url; + + public void AppendFrom(ImageUrl other) + { + if (other == null) { return; } + + if (!string.IsNullOrWhiteSpace(other.Url)) + { + Url += other.Url; + } + + if (other.Detail > 0) + { + Detail = other.Detail; + } + } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Common/ListQuery.cs b/OpenAI-DotNet/Common/ListQuery.cs index f489c869..382655af 100644 --- a/OpenAI-DotNet/Common/ListQuery.cs +++ b/OpenAI-DotNet/Common/ListQuery.cs @@ -76,4 +76,4 @@ public static implicit operator Dictionary(ListQuery query) return parameters; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Common/OpenAIBaseEndpoint.cs b/OpenAI-DotNet/Common/OpenAIBaseEndpoint.cs index e785520d..e3a7482e 100644 --- a/OpenAI-DotNet/Common/OpenAIBaseEndpoint.cs +++ b/OpenAI-DotNet/Common/OpenAIBaseEndpoint.cs @@ -2,6 +2,7 @@ using System.Collections.Generic; using System.Linq; +using System.Net.Http; namespace OpenAI { @@ -12,6 +13,10 @@ public abstract class OpenAIBaseEndpoint // ReSharper disable once InconsistentNaming protected readonly OpenAIClient client; + internal OpenAIClient Client => client; + + 
internal HttpClient HttpClient => client.Client; + /// /// The root endpoint address. /// diff --git a/OpenAI-DotNet/Common/Role.cs b/OpenAI-DotNet/Common/Role.cs index f3ddf017..db414147 100644 --- a/OpenAI-DotNet/Common/Role.cs +++ b/OpenAI-DotNet/Common/Role.cs @@ -13,4 +13,4 @@ public enum Role Function, Tool } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Common/ServerSentEvent.cs b/OpenAI-DotNet/Common/ServerSentEvent.cs new file mode 100644 index 00000000..1b8587b4 --- /dev/null +++ b/OpenAI-DotNet/Common/ServerSentEvent.cs @@ -0,0 +1,55 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using OpenAI.Extensions; +using System.Collections.Generic; +using System.Text.Json.Nodes; +using System.Text.Json.Serialization; + +namespace OpenAI +{ + public sealed class ServerSentEvent : IServerSentEvent + { + internal static readonly Dictionary EventMap = new() + { + { "comment", ServerSentEventKind.Comment }, + { "event", ServerSentEventKind.Event }, + { "data", ServerSentEventKind.Data }, + { "id", ServerSentEventKind.Id }, + { "retry", ServerSentEventKind.Retry } + }; + + internal ServerSentEvent(ServerSentEventKind @event) => Event = @event; + + [JsonInclude] + public ServerSentEventKind Event { get; } + + [JsonInclude] + public JsonNode Value { get; internal set; } + + [JsonInclude] + public JsonNode Data { get; internal set; } + + [JsonIgnore] + public string Object => "stream.event"; + + public override string ToString() + => ToJsonString(); + + public string ToJsonString() + { + var @event = new JsonObject + { + { + Event.ToString().ToLower(), Value + } + }; + + if (Data != null) + { + @event.Add(ServerSentEventKind.Data.ToString().ToLower(), Data); + } + + return @event.ToEscapedJsonString(); + } + } +} diff --git a/OpenAI-DotNet/Common/SortOrder.cs b/OpenAI-DotNet/Common/SortOrder.cs index 639b638e..ef978e39 100644 --- a/OpenAI-DotNet/Common/SortOrder.cs +++ b/OpenAI-DotNet/Common/SortOrder.cs @@ 
-11,4 +11,4 @@ public enum SortOrder [EnumMember(Value = "asc")] Ascending, } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Common/StreamOptions.cs b/OpenAI-DotNet/Common/StreamOptions.cs new file mode 100644 index 00000000..20d8a145 --- /dev/null +++ b/OpenAI-DotNet/Common/StreamOptions.cs @@ -0,0 +1,12 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System.Text.Json.Serialization; + +namespace OpenAI +{ + public sealed class StreamOptions + { + [JsonPropertyName("include_usage")] + public bool IncludeUsage { get; } = true; + } +} diff --git a/OpenAI-DotNet/Common/TextContent.cs b/OpenAI-DotNet/Common/TextContent.cs new file mode 100644 index 00000000..179c7600 --- /dev/null +++ b/OpenAI-DotNet/Common/TextContent.cs @@ -0,0 +1,69 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using OpenAI.Extensions; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json.Serialization; + +namespace OpenAI +{ + public sealed class TextContent : IAppendable + { + public TextContent() { } + + public TextContent(string value, IEnumerable annotations = null) + { + Value = value; + this.annotations = annotations?.ToList(); + } + + [JsonInclude] + [JsonPropertyName("index")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public int? Index { get; private set; } + + /// + /// The data that makes up the text. 
+ /// + [JsonInclude] + [JsonPropertyName("value")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public string Value { get; private set; } + + private List annotations; + + /// + /// Annotations + /// + [JsonInclude] + [JsonPropertyName("annotations")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyList Annotations + { + get => annotations; + private set => annotations = value?.ToList(); + } + + public static implicit operator TextContent(string value) => new(value); + + public static implicit operator string(TextContent content) => content.Value; + + public override string ToString() => Value; + + public void AppendFrom(TextContent other) + { + if (other == null) { return; } + + if (!string.IsNullOrWhiteSpace(other.Value)) + { + Value += other.Value; + } + + if (other is { Annotations: not null }) + { + annotations ??= new List(); + annotations.AppendFrom(other.Annotations); + } + } + } +} diff --git a/OpenAI-DotNet/Common/Tool.cs b/OpenAI-DotNet/Common/Tool.cs index 9e1aab7b..1912a45e 100644 --- a/OpenAI-DotNet/Common/Tool.cs +++ b/OpenAI-DotNet/Common/Tool.cs @@ -1,5 +1,6 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. 
+using OpenAI.Extensions; using System; using System.Collections.Generic; using System.Linq; @@ -10,11 +11,11 @@ namespace OpenAI { - public sealed class Tool + public sealed class Tool : IAppendable { public Tool() { } - public Tool(Tool other) => CopyFrom(other); + public Tool(Tool other) => AppendFrom(other); public Tool(Function function) { @@ -24,7 +25,10 @@ public Tool(Function function) public static implicit operator Tool(Function function) => new(function); - public static Tool Retrieval { get; } = new() { Type = "retrieval" }; + [Obsolete("Use FileSearch")] + public static Tool Retrieval { get; } = new() { Type = "file_search" }; + + public static Tool FileSearch { get; } = new() { Type = "file_search" }; public static Tool CodeInterpreter { get; } = new() { Type = "code_interpreter" }; @@ -34,7 +38,7 @@ public Tool(Function function) [JsonInclude] [JsonPropertyName("index")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public int? 
Index { get; private set; } [JsonInclude] @@ -46,9 +50,11 @@ public Tool(Function function) [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public Function Function { get; private set; } - internal void CopyFrom(Tool other) + public void AppendFrom(Tool other) { - if (!string.IsNullOrWhiteSpace(other?.Id)) + if (other == null) { return; } + + if (!string.IsNullOrWhiteSpace(other.Id)) { Id = other.Id; } @@ -58,12 +64,12 @@ internal void CopyFrom(Tool other) Index = other.Index.Value; } - if (!string.IsNullOrWhiteSpace(other?.Type)) + if (!string.IsNullOrWhiteSpace(other.Type)) { Type = other.Type; } - if (other?.Function != null) + if (other.Function != null) { if (Function == null) { @@ -71,7 +77,7 @@ internal void CopyFrom(Tool other) } else { - Function.CopyFrom(other.Function); + Function.AppendFrom(other.Function); } } } @@ -106,12 +112,60 @@ public async Task InvokeFunctionAsync(CancellationToken cancellationToke public async Task InvokeFunctionAsync(CancellationToken cancellationToken = default) => await Function.InvokeAsync(cancellationToken).ConfigureAwait(false); + #region Tool Cache + private static readonly List toolCache = new() { - Retrieval, + FileSearch, CodeInterpreter }; + /// + /// Gets a list of all available tools. + /// + /// + /// This method will scan all assemblies for static methods decorated with the . + /// + /// Optional, Whether to include the default tools (Retrieval and CodeInterpreter). + /// Optional, Whether to force an update of the tool cache. + /// Optional, whether to force the tool cache to be cleared before updating. + /// A list of all available tools. 
+ public static IReadOnlyList GetAllAvailableTools(bool includeDefaults = true, bool forceUpdate = false, bool clearCache = false) + { + if (clearCache) + { + ClearRegisteredTools(); + } + + if (forceUpdate || toolCache.All(tool => tool.Type != "function")) + { + var tools = new List(); + tools.AddRange( + from assembly in AppDomain.CurrentDomain.GetAssemblies() + from type in assembly.GetTypes() + from method in type.GetMethods() + where method.IsStatic + let functionAttribute = method.GetCustomAttribute() + where functionAttribute != null + let name = $"{type.FullName}.{method.Name}".Replace('.', '_') + let description = functionAttribute.Description + select Function.GetOrCreateFunction(name, description, method) + into function + select new Tool(function)); + + foreach (var newTool in tools.Where(tool => + !toolCache.Any(knownTool => + knownTool.Type == "function" && knownTool.Function.Name == tool.Function.Name && knownTool.Function.Instance == null))) + { + toolCache.Add(newTool); + } + } + + return !includeDefaults + ? toolCache.Where(tool => tool.Type == "function").ToList() + : toolCache; + } + /// /// Clears the tool cache of all previously registered tools. /// @@ -120,7 +174,7 @@ public static void ClearRegisteredTools() toolCache.Clear(); Function.ClearFunctionCache(); toolCache.Add(CodeInterpreter); - toolCache.Add(Retrieval); + toolCache.Add(FileSearch); } /// @@ -153,68 +207,27 @@ public static bool TryRegisterTool(Tool tool) toolCache.Add(tool); return true; - - } - - private static bool TryGetTool(string name, object instance, out Tool tool) - { - foreach (var knownTool in toolCache.Where(knownTool => - knownTool.Type == "function" && - knownTool.Function.Name == name && - ReferenceEquals(knownTool, instance))) - { - tool = knownTool; - return true; - } - - tool = null; - return false; } /// - /// Gets a list of all available tools. + /// Tries to remove a tool from the Tool cache. 
/// - /// - /// This method will scan all assemblies for static methods decorated with the . - /// - /// Optional, Whether to include the default tools (Retrieval and CodeInterpreter). - /// Optional, Whether to force an update of the tool cache. - /// Optional, whether to force the tool cache to be cleared before updating. - /// A list of all available tools. - public static IReadOnlyList GetAllAvailableTools(bool includeDefaults = true, bool forceUpdate = false, bool clearCache = false) + /// The tool to remove. + /// True, if the tool was removed from the cache. + /// + public static bool TryUnregisterTool(Tool tool) { - if (clearCache) + if (!IsToolRegistered(tool)) { - ClearRegisteredTools(); + return false; } - if (forceUpdate || toolCache.All(tool => tool.Type != "function")) + if (tool.Type != "function") { - var tools = new List(); - tools.AddRange( - from assembly in AppDomain.CurrentDomain.GetAssemblies() - from type in assembly.GetTypes() - from method in type.GetMethods() - where method.IsStatic - let functionAttribute = method.GetCustomAttribute() - where functionAttribute != null - let name = $"{type.FullName}.{method.Name}".Replace('.', '_') - let description = functionAttribute.Description - select new Function(name, description, method) - into function - select new Tool(function)); - - foreach (var newTool in tools.Where(tool => - !toolCache.Any(knownTool => - knownTool.Type == "function" && knownTool.Function.Name == tool.Function.Name && knownTool.Function.Instance == null))) - { - toolCache.Add(newTool); - } + throw new InvalidOperationException("Only function tools can be unregistered."); } - return !includeDefaults - ? 
toolCache.Where(tool => tool.Type == "function").ToList() - : toolCache; + return Function.TryRemoveFunction(tool.Function.Name) && toolCache.Remove(tool); } /// @@ -274,11 +287,27 @@ public static Tool GetOrCreateTool(object instance, string methodName, string de return tool; } - tool = new Tool(new Function(functionName, description ?? string.Empty, method, instance)); + tool = new Tool(Function.GetOrCreateFunction(functionName, description ?? string.Empty, method, instance)); toolCache.Add(tool); return tool; } + private static bool TryGetTool(string name, object instance, out Tool tool) + { + foreach (var knownTool in toolCache.Where(knownTool => + knownTool.Type == "function" && knownTool.Function.Name == name && + ReferenceEquals(knownTool.Function.Instance, instance))) + { + tool = knownTool; + return true; + } + + tool = null; + return false; + } + + #endregion Tool Cache + #region Func<,> Overloads public static Tool FromFunc(string name, Func function, string description = null) @@ -305,8 +334,7 @@ public static Tool FromFunc(string name, Func function return tool; } - public static Tool FromFunc(string name, Func function, - string description = null) + public static Tool FromFunc(string name, Func function, string description = null) { if (TryGetTool(name, function, out var tool)) { @@ -318,8 +346,7 @@ public static Tool FromFunc(string name, Func return tool; } - public static Tool FromFunc(string name, Func function, - string description = null) + public static Tool FromFunc(string name, Func function, string description = null) { if (TryGetTool(name, function, out var tool)) { @@ -430,4 +457,4 @@ public static Tool FromFunc Overloads } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Common/ToolResources.cs b/OpenAI-DotNet/Common/ToolResources.cs new file mode 100644 index 00000000..57949449 --- /dev/null +++ b/OpenAI-DotNet/Common/ToolResources.cs @@ -0,0 +1,50 @@ +// Licensed under the MIT License. 
See LICENSE in the project root for license information. + +using System.Text.Json.Serialization; + +namespace OpenAI +{ + /// + /// A set of resources to be used by Assistants and Threads. + /// The resources are specific to the type of tool. + /// For example, the requres a list of file ids, + /// While the requires a list vector store ids. + /// + public sealed class ToolResources + { + public ToolResources() { } + + /// + /// Constructor. + /// + /// . + /// . + public ToolResources(FileSearchResources fileSearch = null, CodeInterpreterResources codeInterpreter = null) + : this(codeInterpreter, fileSearch) + { + } + + /// + /// Constructor. + /// + /// . + /// . + public ToolResources(CodeInterpreterResources codeInterpreter = null, FileSearchResources fileSearch = null) + { + CodeInterpreter = codeInterpreter; + FileSearch = fileSearch; + } + + [JsonInclude] + [JsonPropertyName("code_interpreter")] + public CodeInterpreterResources CodeInterpreter { get; private set; } + + [JsonInclude] + [JsonPropertyName("file_search")] + public FileSearchResources FileSearch { get; private set; } + + public static implicit operator ToolResources(FileSearchResources fileSearch) => new(fileSearch); + + public static implicit operator ToolResources(CodeInterpreterResources codeInterpreter) => new(codeInterpreter); + } +} diff --git a/OpenAI-DotNet/Common/Usage.cs b/OpenAI-DotNet/Common/Usage.cs index 2849d9a2..6b8f3c59 100644 --- a/OpenAI-DotNet/Common/Usage.cs +++ b/OpenAI-DotNet/Common/Usage.cs @@ -9,7 +9,7 @@ public sealed class Usage { public Usage() { } - public Usage(int promptTokens, int completionTokens, int totalTokens) + private Usage(int? promptTokens, int? completionTokens, int? 
totalTokens) { PromptTokens = promptTokens; CompletionTokens = completionTokens; @@ -18,17 +18,20 @@ public Usage(int promptTokens, int completionTokens, int totalTokens) [JsonInclude] [JsonPropertyName("prompt_tokens")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public int? PromptTokens { get; private set; } [JsonInclude] [JsonPropertyName("completion_tokens")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public int? CompletionTokens { get; private set; } [JsonInclude] [JsonPropertyName("total_tokens")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public int? TotalTokens { get; private set; } - internal void CopyFrom(Usage other) + internal void AppendFrom(Usage other) { if (other?.PromptTokens != null) { diff --git a/OpenAI-DotNet/Common/VectorStoreRequest.cs b/OpenAI-DotNet/Common/VectorStoreRequest.cs new file mode 100644 index 00000000..60d05c04 --- /dev/null +++ b/OpenAI-DotNet/Common/VectorStoreRequest.cs @@ -0,0 +1,53 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace OpenAI +{ + /// + /// A helper to create a vector store with file_ids and attach it to an assistant/thread. + /// There can be a maximum of 1 vector store attached to the assistant/thread. + /// + public sealed class VectorStoreRequest + { + /// + /// Constructor. + /// + /// + /// A list of file IDs to add to the vector store. + /// There can be a maximum of 10000 files in a vector store. + /// + /// + /// Optional, set of 16 key-value pairs that can be attached to a vector store. + /// This can be useful for storing additional information about the vector store in a structured format. + /// Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. 
+ /// + public VectorStoreRequest(IReadOnlyList fileIds, IReadOnlyDictionary metadata = null) + { + FileIds = fileIds; + Metadata = metadata; + } + + /// + /// A list of file IDs to add to the vector store. + /// There can be a maximum of 10000 files in a vector store. + /// + [JsonInclude] + [JsonPropertyName("file_ids")] + public IReadOnlyList FileIds { get; private set; } + + /// + /// Set of 16 key-value pairs that can be attached to a vector store. + /// This can be useful for storing additional information about the vector store in a structured format. + /// Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. + /// + [JsonInclude] + [JsonPropertyName("metadata")] + public IReadOnlyDictionary Metadata { get; private set; } + + public static implicit operator VectorStoreRequest(string fileId) => new(new List { fileId }); + + public static implicit operator VectorStoreRequest(List fileIds) => new(fileIds); + } +} diff --git a/OpenAI-DotNet/Embeddings/EmbeddingsEndpoint.cs b/OpenAI-DotNet/Embeddings/EmbeddingsEndpoint.cs index 6d9cdaec..a6d1db7d 100644 --- a/OpenAI-DotNet/Embeddings/EmbeddingsEndpoint.cs +++ b/OpenAI-DotNet/Embeddings/EmbeddingsEndpoint.cs @@ -40,7 +40,7 @@ public EmbeddingsEndpoint(OpenAIClient client) : base(client) { } /// Only supported in text-embedding-3 and later models /// /// Optional, . - /// + /// . public async Task CreateEmbeddingAsync(string input, string model = null, string user = null, int? dimensions = null, CancellationToken cancellationToken = default) => await CreateEmbeddingAsync(new EmbeddingsRequest(input, model, user, dimensions), cancellationToken).ConfigureAwait(false); @@ -64,7 +64,7 @@ public async Task CreateEmbeddingAsync(string input, string /// Only supported in text-embedding-3 and later models /// /// Optional, . - /// + /// . public async Task CreateEmbeddingAsync(IEnumerable input, string model = null, string user = null, int? 
dimensions = null, CancellationToken cancellationToken = default) => await CreateEmbeddingAsync(new EmbeddingsRequest(input, model, user, dimensions), cancellationToken).ConfigureAwait(false); @@ -73,12 +73,12 @@ public async Task CreateEmbeddingAsync(IEnumerable i /// /// . /// Optional, . - /// + /// . public async Task CreateEmbeddingAsync(EmbeddingsRequest request, CancellationToken cancellationToken = default) { - using var jsonContent = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); - using var response = await client.Client.PostAsync(GetUrl(), jsonContent, cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, jsonContent, null, cancellationToken).ConfigureAwait(false); + using var payload = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await client.Client.PostAsync(GetUrl(), payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); return response.Deserialize(responseAsString, client); } } diff --git a/OpenAI-DotNet/Embeddings/EmbeddingsRequest.cs b/OpenAI-DotNet/Embeddings/EmbeddingsRequest.cs index 19383fee..e49f4b31 100644 --- a/OpenAI-DotNet/Embeddings/EmbeddingsRequest.cs +++ b/OpenAI-DotNet/Embeddings/EmbeddingsRequest.cs @@ -20,7 +20,7 @@ public sealed class EmbeddingsRequest /// /// /// ID of the model to use.
- /// Defaults to: + /// Defaults to: /// /// /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. @@ -29,6 +29,7 @@ public sealed class EmbeddingsRequest /// The number of dimensions the resulting output embeddings should have. /// Only supported in text-embedding-3 and later models /// + /// A valid string is a Required parameter. public EmbeddingsRequest(string input, string model = null, string user = null, int? dimensions = null) : this(new List { input }, model, user, dimensions) { @@ -57,6 +58,7 @@ public EmbeddingsRequest(string input, string model = null, string user = null, /// The number of dimensions the resulting output embeddings should have. /// Only supported in text-embedding-3 and later models /// + /// A valid string is a Required parameter. public EmbeddingsRequest(IEnumerable input, string model = null, string user = null, int? dimensions = null) { Input = input?.ToList(); diff --git a/OpenAI-DotNet/Extensions/BaseEndpointExtensions.cs b/OpenAI-DotNet/Extensions/BaseEndpointExtensions.cs new file mode 100644 index 00000000..ad19f951 --- /dev/null +++ b/OpenAI-DotNet/Extensions/BaseEndpointExtensions.cs @@ -0,0 +1,130 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. 
+ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Text.Json.Nodes; +using System.Text.RegularExpressions; +using System.Threading; +using System.Threading.Tasks; + +namespace OpenAI.Extensions +{ + internal static class BaseEndpointExtensions + { + private const string ssePattern = @"(?:(?[^:\n]*):)(?[^\n]*)"; + + private static Regex sseRegex = new(ssePattern); + + /// + /// https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events + /// + public static async Task StreamEventsAsync(this OpenAIBaseEndpoint baseEndpoint, string endpoint, StringContent payload, Action eventCallback, CancellationToken cancellationToken) + { + using var request = new HttpRequestMessage(HttpMethod.Post, endpoint); + request.Content = payload; + var response = await baseEndpoint.HttpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); + await response.CheckResponseAsync(false, payload, cancellationToken: cancellationToken).ConfigureAwait(false); + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + var events = new Stack(); + using var reader = new StreamReader(stream); + var isEndOfStream = false; + + try + { + while (await reader.ReadLineAsync() is { } streamData) + { + if (isEndOfStream) + { + break; + } + + cancellationToken.ThrowIfCancellationRequested(); + + if (string.IsNullOrWhiteSpace(streamData)) + { + continue; + } + + var matches = sseRegex.Matches(streamData); + + for (var i = 0; i < matches.Count; i++) + { + ServerSentEventKind type; + string value; + string data; + + Match match = matches[i]; + + // If the field type is not provided, treat it as a comment + type = ServerSentEvent.EventMap.GetValueOrDefault(match.Groups[nameof(type)].Value.Trim(), ServerSentEventKind.Comment); + + // The UTF-8 decode algorithm strips one leading UTF-8 Byte Order Mark 
(BOM), if any. + value = match.Groups[nameof(value)].Value.TrimStart(' '); + data = match.Groups[nameof(data)].Value; + + const string doneTag = "[DONE]"; + const string doneEvent = "done"; + + // if either value or data equals doneTag then stop processing events. + if (value.Equals(doneTag) || data.Equals(doneTag) || value.Equals(doneEvent)) + { + isEndOfStream = true; + break; + } + + var @event = new ServerSentEvent(type); + + try + { + @event.Value = JsonNode.Parse(value); + } + catch + { + @event.Value = value; + } + + if (!string.IsNullOrWhiteSpace(data)) + { + try + { + @event.Data = JsonNode.Parse(data); + } + catch + { + @event.Data = string.IsNullOrWhiteSpace(data) ? null : data; + } + } + + if (type == ServerSentEventKind.Data && + events.Count > 0 && + events.Peek().Event == ServerSentEventKind.Event) + { + var previousEvent = events.Pop(); + previousEvent.Data = @event.Value; + eventCallback?.Invoke(response, previousEvent); + events.Push(previousEvent); + } + else + { + if (type != ServerSentEventKind.Event) + { + eventCallback?.Invoke(response, @event); + } + + events.Push(@event); + } + } + } + } + finally + { + await response.CheckResponseAsync(baseEndpoint.EnableDebug, payload, null, events.Reverse().ToList(), cancellationToken).ConfigureAwait(false); + } + + return response; + } + } +} diff --git a/OpenAI-DotNet/Extensions/CollectionExtensions.cs b/OpenAI-DotNet/Extensions/CollectionExtensions.cs new file mode 100644 index 00000000..11795561 --- /dev/null +++ b/OpenAI-DotNet/Extensions/CollectionExtensions.cs @@ -0,0 +1,34 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. 
+ +using System.Collections.Generic; + +namespace OpenAI.Extensions +{ + internal static class CollectionExtensions + { + public static void AppendFrom(this List self, IReadOnlyList other) + where T : IAppendable, new() + { + if (other == null) + { + return; + } + + foreach (var otherItem in other) + { + if (otherItem == null || !otherItem.Index.HasValue) { continue; } + + if (otherItem.Index + 1 > self.Count) + { + var newItem = new T(); + newItem.AppendFrom(otherItem); + self.Insert(otherItem.Index.Value, newItem); + } + else + { + self[otherItem.Index.Value].AppendFrom(otherItem); + } + } + } + } +} diff --git a/OpenAI-DotNet/Extensions/IAppendable.cs b/OpenAI-DotNet/Extensions/IAppendable.cs new file mode 100644 index 00000000..f725ff3a --- /dev/null +++ b/OpenAI-DotNet/Extensions/IAppendable.cs @@ -0,0 +1,9 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +namespace OpenAI.Extensions +{ + internal interface IAppendable : IIndexable + { + void AppendFrom(T other); + } +} diff --git a/OpenAI-DotNet/Extensions/IIndexable.cs b/OpenAI-DotNet/Extensions/IIndexable.cs new file mode 100644 index 00000000..2b0240d9 --- /dev/null +++ b/OpenAI-DotNet/Extensions/IIndexable.cs @@ -0,0 +1,9 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +namespace OpenAI.Extensions +{ + internal interface IIndexable + { + int? Index { get; } + } +} diff --git a/OpenAI-DotNet/Extensions/JsonStringEnumConverter.cs b/OpenAI-DotNet/Extensions/JsonStringEnumConverter.cs index 5073553e..f59c2099 100644 --- a/OpenAI-DotNet/Extensions/JsonStringEnumConverter.cs +++ b/OpenAI-DotNet/Extensions/JsonStringEnumConverter.cs @@ -58,29 +58,29 @@ public override TEnum Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSe switch (type) { case JsonTokenType.String: - { - var stringValue = reader.GetString(); - - if (stringValue != null) { - var value = namingPolicy != null - ? 
namingPolicy.ConvertName(stringValue) - : stringValue; + var stringValue = reader.GetString(); - if (stringToEnum.TryGetValue(value, out var enumValue)) + if (stringValue != null) { - return enumValue; + var value = namingPolicy != null + ? namingPolicy.ConvertName(stringValue) + : stringValue; + + if (stringToEnum.TryGetValue(value, out var enumValue)) + { + return enumValue; + } } - } - break; - } + break; + } case JsonTokenType.Number: - { - var numValue = reader.GetInt32(); - numberToEnum.TryGetValue(numValue, out var enumValue); - return enumValue; - } + { + var numValue = reader.GetInt32(); + numberToEnum.TryGetValue(numValue, out var enumValue); + return enumValue; + } } return default; @@ -89,4 +89,4 @@ public override TEnum Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSe public override void Write(Utf8JsonWriter writer, TEnum value, JsonSerializerOptions options) => writer.WriteStringValue(enumToString[value]); } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Extensions/JsonStringEnumConverterFactory.cs b/OpenAI-DotNet/Extensions/JsonStringEnumConverterFactory.cs index cdb41327..fe20b8cc 100644 --- a/OpenAI-DotNet/Extensions/JsonStringEnumConverterFactory.cs +++ b/OpenAI-DotNet/Extensions/JsonStringEnumConverterFactory.cs @@ -17,4 +17,4 @@ public override bool CanConvert(Type typeToConvert) public override JsonConverter CreateConverter(Type typeToConvert, JsonSerializerOptions options) => (JsonConverter)Activator.CreateInstance(typeof(JsonStringEnumConverter<>).MakeGenericType(typeToConvert))!; } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Extensions/ResponseExtensions.cs b/OpenAI-DotNet/Extensions/ResponseExtensions.cs index f55f76e0..24172761 100644 --- a/OpenAI-DotNet/Extensions/ResponseExtensions.cs +++ b/OpenAI-DotNet/Extensions/ResponseExtensions.cs @@ -9,6 +9,7 @@ using System.Net.Http.Headers; using System.Runtime.CompilerServices; using System.Text; +using System.Text.Encodings.Web; using System.Text.Json; 
using System.Text.Json.Nodes; using System.Threading; @@ -35,6 +36,12 @@ internal static class ResponseExtensions NumberDecimalSeparator = "." }; + private static readonly JsonSerializerOptions debugJsonOptions = new() + { + WriteIndented = true, + Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping + }; + internal static void SetResponseData(this BaseResponse response, HttpResponseHeaders headers, OpenAIClient client) { if (response is IListResponse listResponse) @@ -106,7 +113,37 @@ internal static void SetResponseData(this BaseResponse response, HttpResponseHea } } - internal static async Task ReadAsStringAsync(this HttpResponseMessage response, bool debugResponse, HttpContent requestContent = null, MemoryStream responseStream = null, CancellationToken cancellationToken = default, [CallerMemberName] string methodName = null) + internal static async Task CheckResponseAsync(this HttpResponseMessage response, bool debug, CancellationToken cancellationToken, [CallerMemberName] string methodName = null) + { + if (!response.IsSuccessStatusCode || debug) + { + await response.ReadAsStringAsync(debug, null, null, null, cancellationToken, methodName).ConfigureAwait(false); + } + } + + internal static async Task CheckResponseAsync(this HttpResponseMessage response, bool debug, StringContent requestContent, CancellationToken cancellationToken, [CallerMemberName] string methodName = null) + { + if (!response.IsSuccessStatusCode || debug) + { + await response.ReadAsStringAsync(debug, requestContent, null, null, cancellationToken, methodName).ConfigureAwait(false); + } + } + + internal static async Task CheckResponseAsync(this HttpResponseMessage response, bool debug, StringContent requestContent, MemoryStream responseStream, List events, CancellationToken cancellationToken, [CallerMemberName] string methodName = null) + { + if (!response.IsSuccessStatusCode || debug) + { + await response.ReadAsStringAsync(debug, requestContent, responseStream, events, cancellationToken, 
methodName).ConfigureAwait(false); + } + } + + internal static async Task ReadAsStringAsync(this HttpResponseMessage response, bool debugResponse, HttpContent requestContent, CancellationToken cancellationToken, [CallerMemberName] string methodName = null) + => await response.ReadAsStringAsync(debugResponse, requestContent, null, null, cancellationToken, methodName).ConfigureAwait(false); + + internal static async Task ReadAsStringAsync(this HttpResponseMessage response, bool debugResponse, CancellationToken cancellationToken, [CallerMemberName] string methodName = null) + => await response.ReadAsStringAsync(debugResponse, null, null, null, cancellationToken, methodName).ConfigureAwait(false); + + internal static async Task ReadAsStringAsync(this HttpResponseMessage response, bool debugResponse, HttpContent requestContent, MemoryStream responseStream, List events, CancellationToken cancellationToken, [CallerMemberName] string methodName = null) { var responseAsString = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); var debugMessage = new StringBuilder(); @@ -185,7 +222,7 @@ internal static async Task ReadAsStringAsync(this HttpResponseMessage re ["Headers"] = response.Headers.ToDictionary(pair => pair.Key, pair => pair.Value), }; - if (responseStream != null || !string.IsNullOrWhiteSpace(responseAsString)) + if (events != null || responseStream != null || !string.IsNullOrWhiteSpace(responseAsString)) { debugMessageObject["Response"]["Body"] = new Dictionary(); } @@ -196,12 +233,33 @@ internal static async Task ReadAsStringAsync(this HttpResponseMessage re try { - ((Dictionary)debugMessageObject["Response"]["Body"])["Stream"] = JsonNode.Parse(body); + ((Dictionary)debugMessageObject["Response"]["Body"])["Events"] = JsonNode.Parse(body); } catch { - ((Dictionary)debugMessageObject["Response"]["Body"])["Stream"] = body; + ((Dictionary)debugMessageObject["Response"]["Body"])["Events"] = body; + } + } + else if (events != null) + { + 
var array = new JsonArray(); + + foreach (var @event in events) + { + var @object = new JsonObject + { + [@event.Event.ToString().ToLower()] = JsonNode.Parse(@event.Value.ToJsonString()) + }; + + if (@event.Data != null) + { + @object[ServerSentEventKind.Data.ToString().ToLower()] = JsonNode.Parse(@event.Data.ToJsonString()); + } + + array.Add(@object); } + + ((Dictionary)debugMessageObject["Response"]["Body"])["Events"] = array; } if (!string.IsNullOrWhiteSpace(responseAsString)) @@ -216,7 +274,7 @@ internal static async Task ReadAsStringAsync(this HttpResponseMessage re } } - debugMessage.Append(JsonSerializer.Serialize(debugMessageObject, new JsonSerializerOptions { WriteIndented = true })); + debugMessage.Append(JsonSerializer.Serialize(debugMessageObject, debugJsonOptions)); Console.WriteLine(debugMessage.ToString()); } @@ -228,17 +286,31 @@ internal static async Task ReadAsStringAsync(this HttpResponseMessage re return responseAsString; } - internal static async Task CheckResponseAsync(this HttpResponseMessage response, bool debug, StringContent requestContent = null, MemoryStream responseStream = null, CancellationToken cancellationToken = default, [CallerMemberName] string methodName = null) + internal static T Deserialize(this HttpResponseMessage response, string json, OpenAIClient client) + where T : BaseResponse { - if (!response.IsSuccessStatusCode || debug) - { - await response.ReadAsStringAsync(debug, requestContent, responseStream, cancellationToken, methodName).ConfigureAwait(false); - } + var result = JsonSerializer.Deserialize(json, OpenAIClient.JsonSerializationOptions); + result.SetResponseData(response.Headers, client); + return result; } - internal static T Deserialize(this HttpResponseMessage response, string json, OpenAIClient client) where T : BaseResponse + internal static T Deserialize(this HttpResponseMessage response, ServerSentEvent ssEvent, OpenAIClient client) + where T : BaseResponse { - var result = JsonSerializer.Deserialize(json, 
OpenAIClient.JsonSerializationOptions); + T result; + + var jNode = ssEvent.Data ?? ssEvent.Value; + + try + { + result = jNode.Deserialize(OpenAIClient.JsonSerializationOptions); + } + catch (Exception e) + { + Console.WriteLine($"Failed to parse {typeof(T).Name} -> {jNode.ToJsonString(debugJsonOptions)}\n{e}"); + throw; + } + result.SetResponseData(response.Headers, client); return result; } diff --git a/OpenAI-DotNet/Extensions/ResponseFormatConverter.cs b/OpenAI-DotNet/Extensions/ResponseFormatConverter.cs new file mode 100644 index 00000000..b0d528d9 --- /dev/null +++ b/OpenAI-DotNet/Extensions/ResponseFormatConverter.cs @@ -0,0 +1,69 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace OpenAI.Extensions +{ + internal sealed class ResponseFormatConverter : JsonConverter + { + private class ResponseFormatObject + { + public ResponseFormatObject(ChatResponseFormat type) => Type = type; + + [JsonInclude] + [JsonPropertyName("type")] + [JsonConverter(typeof(JsonStringEnumConverter))] + public ChatResponseFormat Type { get; private set; } + + public static implicit operator ResponseFormatObject(ChatResponseFormat type) => new(type); + + public static implicit operator ChatResponseFormat(ResponseFormatObject format) => format.Type; + } + + public override ChatResponseFormat Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + try + { + if (reader.TokenType is JsonTokenType.Null or JsonTokenType.String) + { + return ChatResponseFormat.Auto; + } + + return JsonSerializer.Deserialize(ref reader, options); + } + catch (Exception e) + { + throw new Exception($"Error reading {typeof(ChatResponseFormat)} from JSON.", e); + } + } + + public override void Write(Utf8JsonWriter writer, ChatResponseFormat value, JsonSerializerOptions options) + { + const string type = nameof(type); + const string text = 
nameof(text); + // ReSharper disable once InconsistentNaming + const string json_object = nameof(json_object); + + switch (value) + { + case ChatResponseFormat.Auto: + writer.WriteNullValue(); + break; + case ChatResponseFormat.Text: + writer.WriteStartObject(); + writer.WriteString(type, text); + writer.WriteEndObject(); + break; + case ChatResponseFormat.Json: + writer.WriteStartObject(); + writer.WriteString(type, json_object); + writer.WriteEndObject(); + break; + default: + throw new ArgumentOutOfRangeException(nameof(value), value, null); + } + } + } +} diff --git a/OpenAI-DotNet/Extensions/ServerSentEventKind.cs b/OpenAI-DotNet/Extensions/ServerSentEventKind.cs new file mode 100644 index 00000000..ee94325a --- /dev/null +++ b/OpenAI-DotNet/Extensions/ServerSentEventKind.cs @@ -0,0 +1,13 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +namespace OpenAI.Extensions +{ + public enum ServerSentEventKind + { + Comment, + Event, + Data, + Id, + Retry, + } +} diff --git a/OpenAI-DotNet/Extensions/SnakeCaseNamingPolicy.cs b/OpenAI-DotNet/Extensions/SnakeCaseNamingPolicy.cs index 5754663d..c08f327b 100644 --- a/OpenAI-DotNet/Extensions/SnakeCaseNamingPolicy.cs +++ b/OpenAI-DotNet/Extensions/SnakeCaseNamingPolicy.cs @@ -9,4 +9,4 @@ internal sealed class SnakeCaseNamingPolicy : JsonNamingPolicy public override string ConvertName(string name) => StringExtensions.ToSnakeCase(name); } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Extensions/StringExtensions.cs b/OpenAI-DotNet/Extensions/StringExtensions.cs index fc8b289e..e071e4cf 100644 --- a/OpenAI-DotNet/Extensions/StringExtensions.cs +++ b/OpenAI-DotNet/Extensions/StringExtensions.cs @@ -2,6 +2,8 @@ using System.Linq; using System.Net.Http; +using System.Text.Encodings.Web; +using System.Text.Json; namespace OpenAI.Extensions { @@ -41,5 +43,13 @@ public static string ToSnakeCase(string @string) @string.Select((x, i) => i > 0 && char.IsUpper(x) ? 
$"_{x}" : x.ToString())).ToLower(); + + public static string ToEscapedJsonString(this T @object) + => JsonSerializer.Serialize(@object, escapedJsonOptions); + + private static readonly JsonSerializerOptions escapedJsonOptions = new() + { + Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping + }; } } diff --git a/OpenAI-DotNet/Extensions/StringOrObjectConverter.cs b/OpenAI-DotNet/Extensions/StringOrObjectConverter.cs new file mode 100644 index 00000000..ff2bb4d3 --- /dev/null +++ b/OpenAI-DotNet/Extensions/StringOrObjectConverter.cs @@ -0,0 +1,38 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace OpenAI.Extensions +{ + internal sealed class StringOrObjectConverter : JsonConverter + { + public override dynamic Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + => reader.TokenType switch + { + JsonTokenType.Null => null, + JsonTokenType.String => reader.GetString(), + JsonTokenType.StartObject => JsonSerializer.Deserialize(ref reader, options), + _ => throw new JsonException($"Unexpected token type: {reader.TokenType}") + }; + + public override void Write(Utf8JsonWriter writer, dynamic value, JsonSerializerOptions options) + { + switch (value) + { + case null: + writer.WriteNullValue(); + break; + case string stringValue: + writer.WriteStringValue(stringValue); + break; + case T @object: + JsonSerializer.Serialize(writer, @object, options); + break; + default: + throw new JsonException($"Unexpected value type: {value.GetType()}"); + } + } + } +} diff --git a/OpenAI-DotNet/Extensions/TypeExtensions.cs b/OpenAI-DotNet/Extensions/TypeExtensions.cs index fa3fdfc0..6497153a 100644 --- a/OpenAI-DotNet/Extensions/TypeExtensions.cs +++ b/OpenAI-DotNet/Extensions/TypeExtensions.cs @@ -266,4 +266,4 @@ private static Type GetMemberType(MemberInfo member) _ => throw new 
ArgumentException($"{nameof(MemberInfo)} must be of type {nameof(FieldInfo)}, {nameof(PropertyInfo)}", nameof(member)) }; } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Files/FileData.cs b/OpenAI-DotNet/Files/FileData.cs deleted file mode 100644 index f15b1dc6..00000000 --- a/OpenAI-DotNet/Files/FileData.cs +++ /dev/null @@ -1,70 +0,0 @@ -// Licensed under the MIT License. See LICENSE in the project root for license information. - -using System; -using System.Text.Json.Serialization; - -namespace OpenAI.Files -{ - /// - /// The File object represents a document that has been uploaded to OpenAI. - /// - [Obsolete("use FileResponse")] - public sealed class FileData : BaseResponse - { - /// - /// The file identifier, which can be referenced in the API endpoints. - /// - [JsonInclude] - [JsonPropertyName("id")] - public string Id { get; private set; } - - /// - /// The object type, which is always 'file'. - /// - [JsonInclude] - [JsonPropertyName("object")] - public string Object { get; private set; } - - /// - /// The size of the file, in bytes. - /// - [JsonInclude] - [JsonPropertyName("bytes")] - public int Size { get; private set; } - - /// - /// The Unix timestamp (in seconds) for when the file was created. - /// - [JsonInclude] - [JsonPropertyName("created_at")] - public int CreatedAtUnixTimeSeconds { get; private set; } - - [JsonIgnore] - [Obsolete("Use CreatedAtUnixTimeSeconds")] - public int CreatedUnixTime => CreatedAtUnixTimeSeconds; - - [JsonIgnore] - public DateTime CreatedAt => DateTimeOffset.FromUnixTimeSeconds(CreatedAtUnixTimeSeconds).DateTime; - - /// - /// The name of the file. - /// - [JsonInclude] - [JsonPropertyName("filename")] - public string FileName { get; private set; } - - /// - /// The intended purpose of the file. - /// Supported values are 'fine-tune', 'fine-tune-results', 'assistants', and 'assistants_output'. 
- /// - [JsonInclude] - [JsonPropertyName("purpose")] - public string Purpose { get; private set; } - - public static implicit operator string(FileData fileData) => fileData?.ToString(); - - public static implicit operator FileResponse(FileData fileData) => new(fileData); - - public override string ToString() => Id; - } -} diff --git a/OpenAI-DotNet/Files/FilePurpose.cs b/OpenAI-DotNet/Files/FilePurpose.cs new file mode 100644 index 00000000..fc5d3a08 --- /dev/null +++ b/OpenAI-DotNet/Files/FilePurpose.cs @@ -0,0 +1,22 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +namespace OpenAI.Files +{ + public class FilePurpose + { + public const string Assistants = "assistants"; + public const string Batch = "batch"; + public const string FineTune = "fine-tune"; + public const string Vision = "vision"; + + public FilePurpose(string purpose) => Value = purpose; + + public string Value { get; } + + public override string ToString() => Value; + + public static implicit operator FilePurpose(string purpose) => new(purpose); + + public static implicit operator string(FilePurpose purpose) => purpose?.ToString(); + } +} diff --git a/OpenAI-DotNet/Files/FileResponse.cs b/OpenAI-DotNet/Files/FileResponse.cs index 6d21a743..8df95c10 100644 --- a/OpenAI-DotNet/Files/FileResponse.cs +++ b/OpenAI-DotNet/Files/FileResponse.cs @@ -10,20 +10,6 @@ namespace OpenAI.Files ///
public sealed class FileResponse : BaseResponse { - public FileResponse() { } - -#pragma warning disable CS0618 // Type or member is obsolete - internal FileResponse(FileData file) - { - Id = file.Id; - Object = file.Object; - Size = file.Size; - CreatedAtUnixTimeSeconds = file.CreatedAtUnixTimeSeconds; - FileName = file.FileName; - Purpose = file.Purpose; - } -#pragma warning restore CS0618 // Type or member is obsolete - /// /// The file identifier, which can be referenced in the API endpoints. /// @@ -31,13 +17,6 @@ internal FileResponse(FileData file) [JsonPropertyName("id")] public string Id { get; private set; } - /// - /// The object type, which is always 'file'. - /// - [JsonInclude] - [JsonPropertyName("object")] - public string Object { get; private set; } - /// /// The size of the file, in bytes. /// @@ -45,6 +24,9 @@ internal FileResponse(FileData file) [JsonPropertyName("bytes")] public int? Size { get; private set; } + [JsonIgnore] + public int? Bytes => Size; + /// /// The Unix timestamp (in seconds) for when the file was created. /// @@ -52,10 +34,6 @@ internal FileResponse(FileData file) [JsonPropertyName("created_at")] public int CreatedAtUnixTimeSeconds { get; private set; } - [JsonIgnore] - [Obsolete("Use CreatedAtUnixTimeSeconds")] - public int CreatedUnixTime => CreatedAtUnixTimeSeconds; - [JsonIgnore] public DateTime CreatedAt => DateTimeOffset.FromUnixTimeSeconds(CreatedAtUnixTimeSeconds).DateTime; @@ -66,6 +44,13 @@ internal FileResponse(FileData file) [JsonPropertyName("filename")] public string FileName { get; private set; } + /// + /// The object type, which is always 'file'. + /// + [JsonInclude] + [JsonPropertyName("object")] + public string Object { get; private set; } + /// /// The intended purpose of the file. /// Supported values are 'fine-tune', 'fine-tune-results', 'assistants', and 'assistants_output'. 
@@ -74,8 +59,8 @@ internal FileResponse(FileData file) [JsonPropertyName("purpose")] public string Purpose { get; private set; } - public static implicit operator string(FileResponse file) => file?.ToString(); + public static implicit operator string(FileResponse fileData) => fileData.Id; public override string ToString() => Id; } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Files/FilesEndpoint.cs b/OpenAI-DotNet/Files/FilesEndpoint.cs index 4fda6ee5..16968c8d 100644 --- a/OpenAI-DotNet/Files/FilesEndpoint.cs +++ b/OpenAI-DotNet/Files/FilesEndpoint.cs @@ -6,7 +6,6 @@ using System.IO; using System.Net; using System.Net.Http; -using System.Text.Json; using System.Text.Json.Serialization; using System.Threading; using System.Threading.Tasks; @@ -14,15 +13,16 @@ namespace OpenAI.Files { /// - /// Files are used to upload documents that can be used with features like Fine-tuning.
+ /// Files are used to upload documents that can be used with features like Assistants, Fine-tuning, and Batch API.
/// ///
public sealed class FilesEndpoint : OpenAIBaseEndpoint { - private class FilesList + private class FilesList : BaseResponse { + [JsonInclude] [JsonPropertyName("data")] - public IReadOnlyList Files { get; set; } + public IReadOnlyList Files { get; private set; } } /// @@ -47,25 +47,33 @@ public async Task> ListFilesAsync(string purpose = n } using var response = await client.Client.GetAsync(GetUrl(queryParameters: query), cancellationToken).ConfigureAwait(false); - var resultAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken: cancellationToken).ConfigureAwait(false); - return JsonSerializer.Deserialize(resultAsString, OpenAIClient.JsonSerializationOptions)?.Files; + var resultAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); + return response.Deserialize(resultAsString, client)?.Files; } /// - /// Upload a file that contains document(s) to be used across various endpoints/features. - /// Currently, the size of all the files uploaded by one organization can be up to 1 GB. - /// Please contact us if you need to increase the storage limit. + /// Upload a file that can be used across various endpoints. + /// Individual files can be up to 512 MB, and the size of all files uploaded by one organization can be up to 100 GB. /// /// /// Local file path to upload. /// /// - /// The intended purpose of the uploaded documents. - /// If the purpose is set to "fine-tune", each line is a JSON record with "prompt" and "completion" - /// fields representing your training examples. + /// The intended purpose of the uploaded file. + /// Use 'assistants' for Assistants and Message files, + /// 'vision' for Assistants image file inputs, + /// 'batch' for Batch API, + /// and 'fine-tune' for Fine-tuning. /// /// Optional, . /// . + /// + /// + /// The Assistants API supports files up to 2 million tokens and of specific file types. + /// The Fine-tuning API only supports .jsonl files. 
+ /// The Batch API only supports .jsonl files up to 100 MB in size. + /// + /// public async Task UploadFileAsync(string filePath, string purpose, CancellationToken cancellationToken = default) => await UploadFileAsync(new FileUploadRequest(filePath, purpose), cancellationToken).ConfigureAwait(false); @@ -86,8 +94,8 @@ public async Task UploadFileAsync(FileUploadRequest request, Cance content.Add(new ByteArrayContent(fileData.ToArray()), "file", request.FileName); request.Dispose(); using var response = await client.Client.PostAsync(GetUrl(), content, cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, content, cancellationToken: cancellationToken).ConfigureAwait(false); - return JsonSerializer.Deserialize(responseAsString, OpenAIClient.JsonSerializationOptions); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, content, cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client); } /// @@ -120,8 +128,8 @@ async Task InternalDeleteFileAsync(int attempt) } } - await response.CheckResponseAsync(EnableDebug, cancellationToken: cancellationToken).ConfigureAwait(false); - return JsonSerializer.Deserialize(responseAsString, OpenAIClient.JsonSerializationOptions)?.Deleted ?? false; + await response.CheckResponseAsync(EnableDebug, cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client)?.Deleted ?? false; } } @@ -130,12 +138,12 @@ async Task InternalDeleteFileAsync(int attempt) /// /// The ID of the file to use for this request. /// Optional, . - /// + /// . 
public async Task GetFileInfoAsync(string fileId, CancellationToken cancellationToken = default) { using var response = await client.Client.GetAsync(GetUrl($"/{fileId}"), cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken: cancellationToken).ConfigureAwait(false); - return JsonSerializer.Deserialize(responseAsString, OpenAIClient.JsonSerializationOptions); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client); } /// @@ -144,7 +152,7 @@ public async Task GetFileInfoAsync(string fileId, CancellationToke /// The file id to download. /// The directory to download the file into. /// Optional, delete cached file. Default is false. - /// Optional, + /// Optional, . /// The full path of the downloaded file. public async Task DownloadFileAsync(string fileId, string directory, bool deleteCachedFile = false, CancellationToken cancellationToken = default) { @@ -158,7 +166,7 @@ public async Task DownloadFileAsync(string fileId, string directory, boo /// to download. /// The directory to download the file into. /// Optional, delete cached file. Default is false. - /// Optional, + /// Optional, . /// The full path of the downloaded file. public async Task DownloadFileAsync(FileResponse fileData, string directory, bool deleteCachedFile = false, CancellationToken cancellationToken = default) { @@ -196,11 +204,9 @@ public async Task DownloadFileAsync(FileResponse fileData, string direct /// Gets the specified file as stream /// /// to download. - /// Optional, + /// Optional, . /// The file as a stream in an asynchronous operation. 
public async Task RetrieveFileStreamAsync(FileResponse fileData, CancellationToken cancellationToken = default) - { - return await client.Client.GetStreamAsync(GetUrl($"/{fileData.Id}/content"), cancellationToken).ConfigureAwait(false); - } + => await client.Client.GetStreamAsync(GetUrl($"/{fileData.Id}/content"), cancellationToken).ConfigureAwait(false); } } diff --git a/OpenAI-DotNet/FineTuning/CreateFineTuneJobRequest.cs b/OpenAI-DotNet/FineTuning/CreateFineTuneJobRequest.cs index c21248e6..e4a32f52 100644 --- a/OpenAI-DotNet/FineTuning/CreateFineTuneJobRequest.cs +++ b/OpenAI-DotNet/FineTuning/CreateFineTuneJobRequest.cs @@ -14,7 +14,7 @@ public CreateFineTuneJobRequest( string suffix = null, string validationFileId = null) { - Model = model ?? Models.Model.GPT3_5_Turbo; + Model = model ?? Models.Model.GPT4_Turbo; TrainingFileId = trainingFileId; HyperParameters = hyperParameters; Suffix = suffix; diff --git a/OpenAI-DotNet/FineTuning/EventList.cs b/OpenAI-DotNet/FineTuning/EventList.cs deleted file mode 100644 index dfcbe9a0..00000000 --- a/OpenAI-DotNet/FineTuning/EventList.cs +++ /dev/null @@ -1,24 +0,0 @@ -// Licensed under the MIT License. See LICENSE in the project root for license information. - -using System; -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace OpenAI.FineTuning -{ - [Obsolete("Use ListResponse")] - public sealed class EventList - { - [JsonInclude] - [JsonPropertyName("object")] - public string Object { get; private set; } - - [JsonInclude] - [JsonPropertyName("data")] - public IReadOnlyList Events { get; private set; } - - [JsonInclude] - [JsonPropertyName("has_more")] - public bool HasMore { get; private set; } - } -} \ No newline at end of file diff --git a/OpenAI-DotNet/FineTuning/FineTuneJob.cs b/OpenAI-DotNet/FineTuning/FineTuneJob.cs deleted file mode 100644 index 7419b8d2..00000000 --- a/OpenAI-DotNet/FineTuning/FineTuneJob.cs +++ /dev/null @@ -1,93 +0,0 @@ -// Licensed under the MIT License. 
See LICENSE in the project root for license information. - -using System; -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace OpenAI.FineTuning -{ - [Obsolete("use FineTuneJobResponse")] - public sealed class FineTuneJob : BaseResponse - { - [JsonInclude] - [JsonPropertyName("object")] - public string Object { get; private set; } - - [JsonInclude] - [JsonPropertyName("id")] - public string Id { get; private set; } - - [JsonInclude] - [JsonPropertyName("model")] - public string Model { get; private set; } - - [JsonInclude] - [JsonPropertyName("created_at")] - public int? CreateAtUnixTimeSeconds { get; private set; } - - [JsonIgnore] - [Obsolete("Use CreateAtUnixTimeSeconds")] - public int? CreatedAtUnixTime => CreateAtUnixTimeSeconds; - - [JsonIgnore] - public DateTime? CreatedAt - => CreateAtUnixTimeSeconds.HasValue - ? DateTimeOffset.FromUnixTimeSeconds(CreateAtUnixTimeSeconds.Value).DateTime - : null; - - [JsonInclude] - [JsonPropertyName("finished_at")] - public int? FinishedAtUnixTimeSeconds { get; private set; } - - [JsonIgnore] - [Obsolete("Use FinishedAtUnixTimeSeconds")] - public int? FinishedAtUnixTime => CreateAtUnixTimeSeconds; - - [JsonIgnore] - public DateTime? FinishedAt - => FinishedAtUnixTimeSeconds.HasValue - ? 
DateTimeOffset.FromUnixTimeSeconds(FinishedAtUnixTimeSeconds.Value).DateTime - : null; - - [JsonInclude] - [JsonPropertyName("fine_tuned_model")] - public string FineTunedModel { get; private set; } - - [JsonInclude] - [JsonPropertyName("organization_id")] - public string OrganizationId { get; private set; } - - [JsonInclude] - [JsonPropertyName("result_files")] - public IReadOnlyList ResultFiles { get; private set; } - - [JsonInclude] - [JsonPropertyName("status")] - public JobStatus Status { get; private set; } - - [JsonInclude] - [JsonPropertyName("validation_file")] - public string ValidationFile { get; private set; } - - [JsonInclude] - [JsonPropertyName("training_file")] - public string TrainingFile { get; private set; } - - [JsonInclude] - [JsonPropertyName("hyperparameters")] - public HyperParams HyperParameters { get; private set; } - - [JsonInclude] - [JsonPropertyName("trained_tokens")] - public int? TrainedTokens { get; private set; } - - [JsonIgnore] - public IReadOnlyList Events { get; internal set; } = new List(); - - public static implicit operator FineTuneJobResponse(FineTuneJob job) => new(job); - - public static implicit operator string(FineTuneJob job) => job?.ToString(); - - public override string ToString() => Id; - } -} diff --git a/OpenAI-DotNet/FineTuning/FineTuneJobList.cs b/OpenAI-DotNet/FineTuning/FineTuneJobList.cs deleted file mode 100644 index 60dabe03..00000000 --- a/OpenAI-DotNet/FineTuning/FineTuneJobList.cs +++ /dev/null @@ -1,24 +0,0 @@ -// Licensed under the MIT License. See LICENSE in the project root for license information. 
- -using System; -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace OpenAI.FineTuning -{ - [Obsolete("Use ListResponse")] - public sealed class FineTuneJobList - { - [JsonInclude] - [JsonPropertyName("object")] - public string Object { get; private set; } - - [JsonInclude] - [JsonPropertyName("data")] - public IReadOnlyList Jobs { get; private set; } - - [JsonInclude] - [JsonPropertyName("has_more")] - public bool HasMore { get; private set; } - } -} \ No newline at end of file diff --git a/OpenAI-DotNet/FineTuning/FineTuneJobResponse.cs b/OpenAI-DotNet/FineTuning/FineTuneJobResponse.cs index 9674e2ac..d0cd0f37 100644 --- a/OpenAI-DotNet/FineTuning/FineTuneJobResponse.cs +++ b/OpenAI-DotNet/FineTuning/FineTuneJobResponse.cs @@ -1,5 +1,6 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. +using OpenAI.Extensions; using System; using System.Collections.Generic; using System.Linq; @@ -9,34 +10,6 @@ namespace OpenAI.FineTuning { public sealed class FineTuneJobResponse : BaseResponse { - public FineTuneJobResponse() { } - -#pragma warning disable CS0618 // Type or member is obsolete - internal FineTuneJobResponse(FineTuneJob job) - { - Object = job.Object; - Id = job.Id; - Model = job.Model; - CreateAtUnixTimeSeconds = job.CreateAtUnixTimeSeconds; - FinishedAtUnixTimeSeconds = job.FinishedAtUnixTimeSeconds; - FineTunedModel = job.FineTunedModel; - OrganizationId = job.OrganizationId; - ResultFiles = job.ResultFiles; - Status = job.Status; - ValidationFile = job.ValidationFile; - TrainingFile = job.TrainingFile; - HyperParameters = job.HyperParameters; - TrainedTokens = job.TrainedTokens; - events = new List(job.Events.Count); - - foreach (var jobEvent in job.Events) - { - jobEvent.Client = Client; - events.Add(jobEvent); - } - } -#pragma warning restore CS0618 // Type or member is obsolete - [JsonInclude] [JsonPropertyName("object")] public string Object { get; private set; } @@ -83,6 +56,7 
@@ public DateTime? FinishedAt [JsonInclude] [JsonPropertyName("status")] + [JsonConverter(typeof(JsonStringEnumConverter))] public JobStatus Status { get; private set; } [JsonInclude] @@ -122,4 +96,4 @@ internal set public override string ToString() => Id; } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/FineTuning/FineTuningEndpoint.cs b/OpenAI-DotNet/FineTuning/FineTuningEndpoint.cs index ce646b46..1aa74ebb 100644 --- a/OpenAI-DotNet/FineTuning/FineTuningEndpoint.cs +++ b/OpenAI-DotNet/FineTuning/FineTuningEndpoint.cs @@ -30,9 +30,9 @@ public FineTuningEndpoint(OpenAIClient client) : base(client) { } /// . public async Task CreateJobAsync(CreateFineTuneJobRequest jobRequest, CancellationToken cancellationToken = default) { - using var jsonContent = JsonSerializer.Serialize(jobRequest, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); - using var response = await client.Client.PostAsync(GetUrl("/jobs"), jsonContent, cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, jsonContent, null, cancellationToken).ConfigureAwait(false); + using var payload = JsonSerializer.Serialize(jobRequest, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await client.Client.PostAsync(GetUrl("/jobs"), payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); return response.Deserialize(responseAsString, client); } @@ -52,7 +52,7 @@ public async Task> ListJobsAsync(ListQuery que /// /// Gets info about the fine-tune job. /// - /// . + /// . /// Optional, . /// . public async Task GetJobInfoAsync(string jobId, CancellationToken cancellationToken = default) @@ -67,21 +67,21 @@ public async Task GetJobInfoAsync(string jobId, Cancellatio /// /// Immediately cancel a fine-tune job. /// - /// to cancel. + /// to cancel. /// Optional, . /// . 
public async Task CancelJobAsync(string jobId, CancellationToken cancellationToken = default) { using var response = await client.Client.PostAsync(GetUrl($"/jobs/{jobId}/cancel"), null!, cancellationToken).ConfigureAwait(false); var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken: cancellationToken).ConfigureAwait(false); - var result = JsonSerializer.Deserialize(responseAsString, OpenAIClient.JsonSerializationOptions); + var result = response.Deserialize(responseAsString, client); return result.Status == JobStatus.Cancelled; } /// /// Get fine-grained status updates for a fine-tune job. /// - /// . + /// . /// . /// Optional, . /// List of events for . diff --git a/OpenAI-DotNet/FineTuning/HyperParameters.cs b/OpenAI-DotNet/FineTuning/HyperParameters.cs index 948f9b2f..4df7bb53 100644 --- a/OpenAI-DotNet/FineTuning/HyperParameters.cs +++ b/OpenAI-DotNet/FineTuning/HyperParameters.cs @@ -15,4 +15,4 @@ public sealed class HyperParameters [JsonPropertyName("learning_rate_multiplier")] public int? LearningRateMultiplier { get; set; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/FineTuning/HyperParams.cs b/OpenAI-DotNet/FineTuning/HyperParams.cs index 5b5282b9..e6a2ca3e 100644 --- a/OpenAI-DotNet/FineTuning/HyperParams.cs +++ b/OpenAI-DotNet/FineTuning/HyperParams.cs @@ -18,4 +18,4 @@ public sealed class HyperParams [JsonPropertyName("learning_rate_multiplier")] public object LearningRateMultiplier { get; private set; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/FineTuning/JobStatus.cs b/OpenAI-DotNet/FineTuning/JobStatus.cs index d5f6f8a5..e0095cc6 100644 --- a/OpenAI-DotNet/FineTuning/JobStatus.cs +++ b/OpenAI-DotNet/FineTuning/JobStatus.cs @@ -1,15 +1,23 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. 
+using System.Runtime.Serialization; + namespace OpenAI.FineTuning { public enum JobStatus { NotStarted = 0, + [EnumMember(Value = "validating_files")] ValidatingFiles, + [EnumMember(Value = "queued")] Queued, + [EnumMember(Value = "running")] Running, + [EnumMember(Value = "succeeded")] Succeeded, + [EnumMember(Value = "failed")] Failed, + [EnumMember(Value = "cancelled")] Cancelled } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Images/AbstractBaseImageRequest.cs b/OpenAI-DotNet/Images/AbstractBaseImageRequest.cs index a31a7174..e05b07f6 100644 --- a/OpenAI-DotNet/Images/AbstractBaseImageRequest.cs +++ b/OpenAI-DotNet/Images/AbstractBaseImageRequest.cs @@ -30,10 +30,10 @@ public abstract class AbstractBaseImageRequest /// /// The format in which the generated images are returned. /// Must be one of url or b64_json. - /// Defaults to + /// Defaults to /// /// - protected AbstractBaseImageRequest(Model model = null, int numberOfResults = 1, ImageSize size = ImageSize.Large, ResponseFormat responseFormat = ResponseFormat.Url, string user = null) + protected AbstractBaseImageRequest(Model model = null, int numberOfResults = 1, ImageSize size = ImageSize.Large, ImageResponseFormat responseFormat = ImageResponseFormat.Url, string user = null) { Model = string.IsNullOrWhiteSpace(model?.Id) ? Models.Model.DallE_2 : model; Number = numberOfResults; @@ -65,12 +65,12 @@ protected AbstractBaseImageRequest(Model model = null, int numberOfResults = 1, /// /// The format in which the generated images are returned. /// Must be one of url or b64_json. - /// Defaults to + /// Defaults to /// [JsonPropertyName("response_format")] - [JsonConverter(typeof(JsonStringEnumConverter))] + [JsonConverter(typeof(JsonStringEnumConverter))] [FunctionProperty("The format in which the generated images are returned. 
Must be one of url or b64_json.")] - public ResponseFormat ResponseFormat { get; } + public ImageResponseFormat ResponseFormat { get; } /// /// The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024. @@ -86,4 +86,4 @@ protected AbstractBaseImageRequest(Model model = null, int numberOfResults = 1, [FunctionProperty("A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.")] public string User { get; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Images/ImageEditRequest.cs b/OpenAI-DotNet/Images/ImageEditRequest.cs index ef5e6f05..f24ac3ed 100644 --- a/OpenAI-DotNet/Images/ImageEditRequest.cs +++ b/OpenAI-DotNet/Images/ImageEditRequest.cs @@ -30,7 +30,7 @@ public sealed class ImageEditRequest : AbstractBaseImageRequest, IDisposable /// /// The format in which the generated images are returned. /// Must be one of url or b64_json. - /// Defaults to + /// Defaults to /// /// /// The model to use for image generation. @@ -41,7 +41,7 @@ public ImageEditRequest( int numberOfResults = 1, ImageSize size = ImageSize.Large, string user = null, - ResponseFormat responseFormat = ResponseFormat.Url, + ImageResponseFormat responseFormat = ImageResponseFormat.Url, Model model = null) : this(imagePath, null, prompt, numberOfResults, size, user, responseFormat, model) { @@ -73,7 +73,7 @@ public ImageEditRequest( /// /// The format in which the generated images are returned. /// Must be one of url or b64_json. - /// Defaults to + /// Defaults to /// /// /// The model to use for image generation. @@ -85,7 +85,7 @@ public ImageEditRequest( int numberOfResults = 1, ImageSize size = ImageSize.Large, string user = null, - ResponseFormat responseFormat = ResponseFormat.Url, + ImageResponseFormat responseFormat = ImageResponseFormat.Url, Model model = null) : this( File.OpenRead(imagePath), @@ -124,7 +124,7 @@ public ImageEditRequest( /// /// The format in which the generated images are returned. 
/// Must be one of url or b64_json. - /// Defaults to + /// Defaults to /// /// /// The model to use for image generation. @@ -136,7 +136,7 @@ public ImageEditRequest( int numberOfResults = 1, ImageSize size = ImageSize.Large, string user = null, - ResponseFormat responseFormat = ResponseFormat.Url, + ImageResponseFormat responseFormat = ImageResponseFormat.Url, Model model = null) : this(image, imageName, null, null, prompt, numberOfResults, size, user, responseFormat, model) { @@ -170,7 +170,7 @@ public ImageEditRequest( /// /// The format in which the generated images are returned. /// Must be one of url or b64_json. - /// Defaults to + /// Defaults to /// /// /// The model to use for image generation. @@ -184,7 +184,7 @@ public ImageEditRequest( int numberOfResults = 1, ImageSize size = ImageSize.Large, string user = null, - ResponseFormat responseFormat = ResponseFormat.Url, + ImageResponseFormat responseFormat = ImageResponseFormat.Url, Model model = null) : base(model, numberOfResults, size, responseFormat, user) { diff --git a/OpenAI-DotNet/Images/ImageGenerationRequest.cs b/OpenAI-DotNet/Images/ImageGenerationRequest.cs index 3c93e686..acab3721 100644 --- a/OpenAI-DotNet/Images/ImageGenerationRequest.cs +++ b/OpenAI-DotNet/Images/ImageGenerationRequest.cs @@ -33,7 +33,7 @@ public sealed class ImageGenerationRequest /// /// The format in which the generated images are returned. /// Must be one of url or b64_json. - /// Defaults to + /// Defaults to /// /// /// The size of the generated images. @@ -55,7 +55,7 @@ public ImageGenerationRequest( Model model = null, int numberOfResults = 1, string quality = null, - ResponseFormat responseFormat = ResponseFormat.Url, + ImageResponseFormat responseFormat = ImageResponseFormat.Url, string size = null, string style = null, string user = null) @@ -98,18 +98,18 @@ public ImageGenerationRequest( /// [JsonPropertyName("quality")] [FunctionProperty("The quality of the image that will be generated. 
hd creates images with finer details and greater consistency across the image. This param is only supported for dall-e-3.", - possibleValues: new object[] { "standard", "hd" })] + possibleValues: ["standard", "hd"])] public string Quality { get; } /// /// The format in which the generated images are returned. /// Must be one of url or b64_json. - /// Defaults to + /// Defaults to /// [JsonPropertyName("response_format")] - [JsonConverter(typeof(JsonStringEnumConverter))] + [JsonConverter(typeof(JsonStringEnumConverter))] [FunctionProperty("The format in which the generated images are returned. Must be one of url or b64_json.", true)] - public ResponseFormat ResponseFormat { get; } + public ImageResponseFormat ResponseFormat { get; } /// /// The size of the generated images. diff --git a/OpenAI-DotNet/Images/ResponseFormat.cs b/OpenAI-DotNet/Images/ImageResponseFormat.cs similarity index 87% rename from OpenAI-DotNet/Images/ResponseFormat.cs rename to OpenAI-DotNet/Images/ImageResponseFormat.cs index 24c23246..340cba82 100644 --- a/OpenAI-DotNet/Images/ResponseFormat.cs +++ b/OpenAI-DotNet/Images/ImageResponseFormat.cs @@ -4,11 +4,11 @@ namespace OpenAI.Images { - public enum ResponseFormat + public enum ImageResponseFormat { [EnumMember(Value = "url")] Url, [EnumMember(Value = "b64_json")] B64_Json } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Images/ImageVariationRequest.cs b/OpenAI-DotNet/Images/ImageVariationRequest.cs index 34db426c..9e99fed9 100644 --- a/OpenAI-DotNet/Images/ImageVariationRequest.cs +++ b/OpenAI-DotNet/Images/ImageVariationRequest.cs @@ -26,7 +26,7 @@ public sealed class ImageVariationRequest : AbstractBaseImageRequest, IDisposabl /// /// The format in which the generated images are returned. /// Must be one of url or b64_json. - /// Defaults to + /// Defaults to /// /// /// The model to use for image generation. 
@@ -36,7 +36,7 @@ public ImageVariationRequest( int numberOfResults = 1, ImageSize size = ImageSize.Large, string user = null, - ResponseFormat responseFormat = ResponseFormat.Url, + ImageResponseFormat responseFormat = ImageResponseFormat.Url, Model model = null) : this(File.OpenRead(imagePath), Path.GetFileName(imagePath), numberOfResults, size, user, responseFormat, model) { @@ -63,7 +63,7 @@ public ImageVariationRequest( /// /// The format in which the generated images are returned. /// Must be one of url or b64_json. - /// Defaults to + /// Defaults to /// /// /// The model to use for image generation. @@ -74,7 +74,7 @@ public ImageVariationRequest( int numberOfResults = 1, ImageSize size = ImageSize.Large, string user = null, - ResponseFormat responseFormat = ResponseFormat.Url, + ImageResponseFormat responseFormat = ImageResponseFormat.Url, Model model = null) : base(model, numberOfResults, size, responseFormat, user) { diff --git a/OpenAI-DotNet/Images/ImagesEndpoint.cs b/OpenAI-DotNet/Images/ImagesEndpoint.cs index 0d648458..54903038 100644 --- a/OpenAI-DotNet/Images/ImagesEndpoint.cs +++ b/OpenAI-DotNet/Images/ImagesEndpoint.cs @@ -25,20 +25,20 @@ internal ImagesEndpoint(OpenAIClient client) : base(client) { } /// /// Creates an image given a prompt. /// - /// + /// . /// Optional, . /// A list of generated texture urls to download. 
public async Task> GenerateImageAsync(ImageGenerationRequest request, CancellationToken cancellationToken = default) { - using var jsonContent = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); - using var response = await client.Client.PostAsync(GetUrl("/generations"), jsonContent, cancellationToken).ConfigureAwait(false); - return await DeserializeResponseAsync(response, jsonContent, cancellationToken).ConfigureAwait(false); + using var payload = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await client.Client.PostAsync(GetUrl("/generations"), payload, cancellationToken).ConfigureAwait(false); + return await DeserializeResponseAsync(response, payload, cancellationToken).ConfigureAwait(false); } /// /// Creates an edited or extended image given an original image and a prompt. /// - /// + /// . /// Optional, . /// A list of generated texture urls to download. public async Task> CreateImageEditAsync(ImageEditRequest request, CancellationToken cancellationToken = default) @@ -73,7 +73,7 @@ public async Task> CreateImageEditAsync(ImageEditRequ /// /// Creates a variation of a given image. /// - /// + /// . /// Optional, . /// A list of generated texture urls to download. 
public async Task> CreateImageVariationAsync(ImageVariationRequest request, CancellationToken cancellationToken = default) @@ -98,7 +98,7 @@ public async Task> CreateImageVariationAsync(ImageVar private async Task> DeserializeResponseAsync(HttpResponseMessage response, HttpContent requestContent, CancellationToken cancellationToken = default) { - var resultAsString = await response.ReadAsStringAsync(EnableDebug, requestContent, null, cancellationToken).ConfigureAwait(false); + var resultAsString = await response.ReadAsStringAsync(EnableDebug, requestContent, cancellationToken).ConfigureAwait(false); var imagesResponse = response.Deserialize(resultAsString, client); if (imagesResponse?.Results is not { Count: not 0 }) diff --git a/OpenAI-DotNet/Models/Model.cs b/OpenAI-DotNet/Models/Model.cs index e3d54185..41fd3bcd 100644 --- a/OpenAI-DotNet/Models/Model.cs +++ b/OpenAI-DotNet/Models/Model.cs @@ -73,6 +73,18 @@ public Model(string id, string ownedBy = null) [JsonPropertyName("parent")] public string Parent { get; private set; } + /// + /// GPT-4o (“o” for “omni”) is our most advanced model. + /// It is multimodal (accepting text or image inputs and outputting text), + /// and it has the same high intelligence as GPT-4 Turbo but is much more efficient. + /// It generates text 2x faster and is 50% cheaper. + /// Additionally, GPT-4o has the best vision and performance across non-English languages of our models. + /// + /// + /// Context Window: 128,000 tokens + /// + public static Model GPT4o { get; } = new Model("gpt-4o", "openai"); + /// /// More capable than any GPT-3.5 model, able to do more complex tasks, and optimized for chat. /// Will be updated with our latest model iteration. @@ -124,16 +136,25 @@ public Model(string id, string ownedBy = null) /// /// The default model for . 
/// + /// + /// Output Dimension: 1,536 + /// public static Model Embedding_Ada_002 { get; } = new("text-embedding-ada-002", "openai"); /// /// A highly efficient model which provides a significant upgrade over its predecessor, the text-embedding-ada-002 model. /// + /// + /// Output Dimension: 1,536 + /// public static Model Embedding_3_Small { get; } = new("text-embedding-3-small", "openai"); /// - /// Most capable embedding model for both english and non-english tasks with embeddings of up to 3072 dimensions. + /// Most capable embedding model for both english and non-english tasks. /// + /// + /// Output Dimension: 3,072 + /// public static Model Embedding_3_Large { get; } = new("text-embedding-3-large", "openai"); /// diff --git a/OpenAI-DotNet/Models/ModelsEndpoint.cs b/OpenAI-DotNet/Models/ModelsEndpoint.cs index 3db14fff..c1b5b8f1 100644 --- a/OpenAI-DotNet/Models/ModelsEndpoint.cs +++ b/OpenAI-DotNet/Models/ModelsEndpoint.cs @@ -17,7 +17,7 @@ namespace OpenAI.Models /// public sealed class ModelsEndpoint : OpenAIBaseEndpoint { - private sealed class ModelsList + private sealed class ModelsList : BaseResponse { [JsonInclude] [JsonPropertyName("data")] @@ -39,7 +39,7 @@ public async Task> GetModelsAsync(CancellationToken cancell { using var response = await client.Client.GetAsync(GetUrl(), cancellationToken).ConfigureAwait(false); var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken: cancellationToken).ConfigureAwait(false); - return JsonSerializer.Deserialize(responseAsString, OpenAIClient.JsonSerializationOptions)?.Models; + return response.Deserialize(responseAsString, client)?.Models; } /// @@ -77,7 +77,7 @@ public async Task DeleteFineTuneModelAsync(string modelId, CancellationTok { using var response = await client.Client.DeleteAsync(GetUrl($"/{model.Id}"), cancellationToken).ConfigureAwait(false); var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken: 
cancellationToken).ConfigureAwait(false); - return JsonSerializer.Deserialize(responseAsString, OpenAIClient.JsonSerializationOptions)?.Deleted ?? false; + return response.Deserialize(responseAsString, client)?.Deleted ?? false; } catch (Exception e) { diff --git a/OpenAI-DotNet/Moderations/ModerationsEndpoint.cs b/OpenAI-DotNet/Moderations/ModerationsEndpoint.cs index 0645c471..d449c68d 100644 --- a/OpenAI-DotNet/Moderations/ModerationsEndpoint.cs +++ b/OpenAI-DotNet/Moderations/ModerationsEndpoint.cs @@ -46,13 +46,13 @@ public async Task GetModerationAsync(string input, string model = null, Ca /// /// Classifies if text violates OpenAI's Content Policy /// - /// + /// . /// Optional, . public async Task CreateModerationAsync(ModerationsRequest request, CancellationToken cancellationToken = default) { - using var jsonContent = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); - using var response = await client.Client.PostAsync(GetUrl(), jsonContent, cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, jsonContent, null, cancellationToken).ConfigureAwait(false); + using var payload = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await client.Client.PostAsync(GetUrl(), payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); return response.Deserialize(responseAsString, client); } diff --git a/OpenAI-DotNet/OpenAI-DotNet.csproj b/OpenAI-DotNet/OpenAI-DotNet.csproj index efad3e8e..7965bded 100644 --- a/OpenAI-DotNet/OpenAI-DotNet.csproj +++ b/OpenAI-DotNet/OpenAI-DotNet.csproj @@ -14,7 +14,7 @@ An OpenAI API account is required. Forked from [OpenAI-API-dotnet](https://github.com/OkGoDoIt/OpenAI-API-dotnet). 
More context [on Roger Pincombe's blog](https://rogerpincombe.com/openai-dotnet-api). - + true https://github.com/RageAgainstThePixel/OpenAI-DotNet https://github.com/RageAgainstThePixel/OpenAI-DotNet @@ -28,8 +28,18 @@ More context [on Roger Pincombe's blog](https://rogerpincombe.com/openai-dotnet- OpenAI-DotNet.pfx True True - 7.7.8 + 8.0.0 +Version 8.0.0 +- Updated Assistants Beta v2 +- Added support for specifying project id +- Added BatchEndpoint +- Added VectorStoresEndpoint +- Added Message.ctr to specify specific tool call id, function name, and content +- Renamed OpenAI.Images.ResponseFormat to OpenAI.Images.ImageResponseFormat +- Changed ThreadEndpoint.CancelRunAsync return type from RunResponse to bool +- Fixed Json defined Tools/Functions being improperly added to tool cache +- Added Tool.TryUnregisterTool to remove a tool from the cache Version 7.7.8 - Updated OpenAIClientSettings.ctr to allow for domain http protocol override (i.e. http://localhost:8080 or http://0.0.0.0:8080) - Updated OpenAIClientSettings.BaseRequest public for easier access when implementing custom proxies. 
diff --git a/OpenAI-DotNet/OpenAIClient.cs b/OpenAI-DotNet/OpenAIClient.cs index 4b213e08..a875c0dd 100644 --- a/OpenAI-DotNet/OpenAIClient.cs +++ b/OpenAI-DotNet/OpenAIClient.cs @@ -2,6 +2,7 @@ using OpenAI.Assistants; using OpenAI.Audio; +using OpenAI.Batch; using OpenAI.Chat; using OpenAI.Embeddings; using OpenAI.Extensions; @@ -11,6 +12,7 @@ using OpenAI.Models; using OpenAI.Moderations; using OpenAI.Threads; +using OpenAI.VectorStores; using System; using System.Net.Http; using System.Net.Http.Headers; @@ -67,12 +69,11 @@ public OpenAIClient(OpenAIAuthentication openAIAuthentication = null, OpenAIClie ModerationsEndpoint = new ModerationsEndpoint(this); ThreadsEndpoint = new ThreadsEndpoint(this); AssistantsEndpoint = new AssistantsEndpoint(this); + BatchEndpoint = new BatchEndpoint(this); + VectorStoresEndpoint = new VectorStoresEndpoint(this); } - ~OpenAIClient() - { - Dispose(false); - } + ~OpenAIClient() => Dispose(false); #region IDisposable @@ -165,7 +166,7 @@ private void Dispose(bool disposing) public AudioEndpoint AudioEndpoint { get; } /// - /// Files are used to upload documents that can be used with features like Fine-tuning.
+ /// Files are used to upload documents that can be used with features like Assistants, Fine-tuning, and Batch API.
/// ///
public FilesEndpoint FilesEndpoint { get; } @@ -196,6 +197,19 @@ private void Dispose(bool disposing) ///
public ThreadsEndpoint ThreadsEndpoint { get; } + /// + /// Create large batches of API requests for asynchronous processing. + /// The Batch API returns completions within 24 hours for a 50% discount. + /// + /// + public BatchEndpoint BatchEndpoint { get; } + + /// + /// Vector stores are used to store files for use by the file_search tool. + /// + /// + public VectorStoresEndpoint VectorStoresEndpoint { get; } + #endregion Endpoints private HttpClient SetupClient(HttpClient client = null) @@ -213,7 +227,7 @@ private HttpClient SetupClient(HttpClient client = null) } client.DefaultRequestHeaders.Add("User-Agent", "OpenAI-DotNet"); - client.DefaultRequestHeaders.Add("OpenAI-Beta", "assistants=v1"); + client.DefaultRequestHeaders.Add("OpenAI-Beta", "assistants=v2"); if (OpenAIClientSettings.BaseRequestUrlFormat.Contains(OpenAIClientSettings.OpenAIDomain) && (string.IsNullOrWhiteSpace(OpenAIAuthentication.ApiKey) || @@ -237,7 +251,12 @@ private HttpClient SetupClient(HttpClient client = null) client.DefaultRequestHeaders.Add("OpenAI-Organization", OpenAIAuthentication.OrganizationId); } + if (!string.IsNullOrWhiteSpace(OpenAIAuthentication.ProjectId)) + { + client.DefaultRequestHeaders.Add("OpenAI-Project", OpenAIAuthentication.ProjectId); + } + return client; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/Attachment.cs b/OpenAI-DotNet/Threads/Attachment.cs new file mode 100644 index 00000000..e6947308 --- /dev/null +++ b/OpenAI-DotNet/Threads/Attachment.cs @@ -0,0 +1,47 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System.Collections.Generic; +using System.Linq; +using System.Text.Json.Serialization; + +namespace OpenAI.Threads +{ + public sealed class Attachment + { + public Attachment() { } + + /// + /// Constructor. + /// + /// The ID of the file to attach to the message. + /// The tool to add this file to. 
+ public Attachment(string fileId, Tool tool) : this(fileId, new[] { tool }) { } + + /// + /// Constructor. + /// + /// The ID of the file to attach to the message. + /// The tools to add this file to. + public Attachment(string fileId, IEnumerable tools) + { + FileId = fileId; + Tools = tools?.ToList(); + } + + /// + /// The ID of the file to attach to the message. + /// + [JsonInclude] + [JsonPropertyName("file_id")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public string FileId { get; private set; } + + /// + /// The tools to add this file to. + /// + [JsonInclude] + [JsonPropertyName("tools")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public IReadOnlyList Tools { get; private set; } + } +} diff --git a/OpenAI-DotNet/Threads/CodeInterpreter.cs b/OpenAI-DotNet/Threads/CodeInterpreter.cs index e48723fb..e1713536 100644 --- a/OpenAI-DotNet/Threads/CodeInterpreter.cs +++ b/OpenAI-DotNet/Threads/CodeInterpreter.cs @@ -1,6 +1,7 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. using System.Collections.Generic; +using System.Linq; using System.Text.Json.Serialization; namespace OpenAI.Threads @@ -14,6 +15,8 @@ public sealed class CodeInterpreter [JsonPropertyName("input")] public string Input { get; private set; } + private List outputs; + /// /// The outputs from the Code Interpreter tool call. /// Code Interpreter can output one or more items, including text (logs) or images (image). 
@@ -21,6 +24,26 @@ public sealed class CodeInterpreter /// [JsonInclude] [JsonPropertyName("outputs")] - public IReadOnlyList Outputs { get; private set; } + public IReadOnlyList Outputs + { + get => outputs; + private set => outputs = value?.ToList(); + } + + internal void AppendFrom(CodeInterpreter other) + { + if (other == null) { return; } + + if (!string.IsNullOrWhiteSpace(other.Input)) + { + Input += other.Input; + } + + if (other.Outputs != null) + { + outputs ??= new List(); + outputs.AddRange(other.Outputs); + } + } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/CodeInterpreterOutputType.cs b/OpenAI-DotNet/Threads/CodeInterpreterOutputType.cs index 727afe95..0f2605a1 100644 --- a/OpenAI-DotNet/Threads/CodeInterpreterOutputType.cs +++ b/OpenAI-DotNet/Threads/CodeInterpreterOutputType.cs @@ -11,4 +11,4 @@ public enum CodeInterpreterOutputType [EnumMember(Value = "image")] Image } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/CodeInterpreterOutputs.cs b/OpenAI-DotNet/Threads/CodeInterpreterOutputs.cs index 9efe3c3a..7859e240 100644 --- a/OpenAI-DotNet/Threads/CodeInterpreterOutputs.cs +++ b/OpenAI-DotNet/Threads/CodeInterpreterOutputs.cs @@ -5,8 +5,12 @@ namespace OpenAI.Threads { - public sealed class CodeInterpreterOutputs + public sealed class CodeInterpreterOutputs : IAppendable { + [JsonInclude] + [JsonPropertyName("index")] + public int? Index { get; private set; } + /// /// Output type. Can be either 'logs' or 'image'. 
/// @@ -28,5 +32,32 @@ public sealed class CodeInterpreterOutputs [JsonInclude] [JsonPropertyName("image")] public ImageFile Image { get; private set; } + + public void AppendFrom(CodeInterpreterOutputs other) + { + if (other == null) { return; } + + if (Type == 0 && other.Type > 0) + { + Type = other.Type; + } + + if (!string.IsNullOrWhiteSpace(other.Logs)) + { + Logs += other.Logs; + } + + if (other.Image != null) + { + if (Image == null) + { + Image = other.Image; + } + else + { + Image.AppendFrom(other.Image); + } + } + } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/Content.cs b/OpenAI-DotNet/Threads/Content.cs deleted file mode 100644 index 597b28b5..00000000 --- a/OpenAI-DotNet/Threads/Content.cs +++ /dev/null @@ -1,34 +0,0 @@ -// Licensed under the MIT License. See LICENSE in the project root for license information. - -using OpenAI.Extensions; -using System; -using System.Text.Json.Serialization; - -namespace OpenAI.Threads -{ - public sealed class Content - { - [JsonInclude] - [JsonPropertyName("type")] - [JsonConverter(typeof(JsonStringEnumConverter))] - public ContentType Type { get; private set; } - - [JsonInclude] - [JsonPropertyName("text")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public TextContent Text { get; private set; } - - [JsonInclude] - [JsonPropertyName("image_file")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public ImageFile ImageFile { get; private set; } - - public override string ToString() - => Type switch - { - ContentType.Text => Text.Value, - ContentType.ImageFile => ImageFile.FileId, - _ => throw new ArgumentOutOfRangeException() - }; - } -} \ No newline at end of file diff --git a/OpenAI-DotNet/Threads/ContentText.cs b/OpenAI-DotNet/Threads/ContentText.cs deleted file mode 100644 index 11851941..00000000 --- a/OpenAI-DotNet/Threads/ContentText.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Licensed under the MIT License. 
See LICENSE in the project root for license information. - -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace OpenAI.Threads -{ - public sealed class ContentText - { - public ContentText(string value) => Value = value; - - /// - /// The data that makes up the text. - /// - [JsonInclude] - [JsonPropertyName("value")] - public string Value { get; private set; } - - /// - /// Annotations. - /// - [JsonInclude] - [JsonPropertyName("annotations")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public IReadOnlyList Annotations { get; private set; } - - public static implicit operator ContentText(string value) => new(value); - - public static implicit operator string(ContentText text) => text?.ToString(); - - public override string ToString() => Value; - } -} \ No newline at end of file diff --git a/OpenAI-DotNet/Threads/CreateMessageRequest.cs b/OpenAI-DotNet/Threads/CreateMessageRequest.cs index 5b1bbaba..7622a5c8 100644 --- a/OpenAI-DotNet/Threads/CreateMessageRequest.cs +++ b/OpenAI-DotNet/Threads/CreateMessageRequest.cs @@ -1,26 +1,73 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. +using System; using System.Collections.Generic; using System.Linq; using System.Text.Json.Serialization; namespace OpenAI.Threads { + /// + /// Create a message on a thread. 
+ /// + [Obsolete("use Thread.Message instead.")] public sealed class CreateMessageRequest { public static implicit operator CreateMessageRequest(string content) => new(content); + public static implicit operator CreateMessageRequest(Message message) => new(message.Content, message.Role, message.Attachments, message.Metadata); + + public static implicit operator Message(CreateMessageRequest request) => new(request.Content, request.Role, request.Attachments, request.Metadata); + + [Obsolete("Removed")] + public CreateMessageRequest(string content, IEnumerable fileIds, IReadOnlyDictionary metadata = null) + { + } + /// /// Constructor. /// - /// - /// - /// - public CreateMessageRequest(string content, IEnumerable fileIds = null, IReadOnlyDictionary metadata = null) + /// + /// The contents of the message. + /// + /// + /// The role of the entity that is creating the message. + /// + /// + /// A list of files attached to the message, and the tools they were added to. + /// + /// + /// Set of 16 key-value pairs that can be attached to an object. + /// This can be useful for storing additional information about the object in a structured format. + /// Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. + /// + public CreateMessageRequest(string content, Role role = Role.User, IEnumerable attachments = null, IReadOnlyDictionary metadata = null) + : this(new List { new(content) }, role, attachments, metadata) + { + } + + /// + /// Constructor. + /// + /// + /// The contents of the message. + /// + /// + /// The role of the entity that is creating the message. + /// + /// + /// A list of files attached to the message, and the tools they were added to. + /// + /// + /// Set of 16 key-value pairs that can be attached to an object. + /// This can be useful for storing additional information about the object in a structured format. + /// Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. 
+ /// + public CreateMessageRequest(IEnumerable content, Role role = Role.User, IEnumerable attachments = null, IReadOnlyDictionary metadata = null) { - Role = Role.User; - Content = content; - FileIds = fileIds?.ToList(); + Content = content?.ToList(); + Role = role; + Attachments = attachments?.ToList(); Metadata = metadata; } @@ -34,17 +81,19 @@ public CreateMessageRequest(string content, IEnumerable fileIds = null, public Role Role { get; } /// - /// The content of the message. + /// The contents of the message. /// + [JsonInclude] [JsonPropertyName("content")] - public string Content { get; } + [JsonIgnore(Condition = JsonIgnoreCondition.Never)] + public IReadOnlyList Content { get; private set; } /// - /// A list of File IDs that the message should use. There can be a maximum of 10 files attached to a message. - /// Useful for tools like retrieval and code_interpreter that can access and use files. + /// A list of files attached to the message, and the tools they were added to. /// - [JsonPropertyName("file_ids")] - public IReadOnlyList FileIds { get; } + [JsonInclude] + [JsonPropertyName("Attachments")] + public IReadOnlyList Attachments { get; private set; } /// /// Set of 16 key-value pairs that can be attached to an object. @@ -54,4 +103,4 @@ public CreateMessageRequest(string content, IEnumerable fileIds = null, [JsonPropertyName("metadata")] public IReadOnlyDictionary Metadata { get; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/CreateRunRequest.cs b/OpenAI-DotNet/Threads/CreateRunRequest.cs index 144a732d..b6db36a1 100644 --- a/OpenAI-DotNet/Threads/CreateRunRequest.cs +++ b/OpenAI-DotNet/Threads/CreateRunRequest.cs @@ -1,26 +1,184 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. +using OpenAI.Extensions; +using System; using System.Collections.Generic; using System.Linq; using System.Text.Json.Serialization; namespace OpenAI.Threads { + /// + /// Create a run on a thread. 
+ /// public sealed class CreateRunRequest { + /// + /// Constructor. + /// + /// + /// The ID of the assistant used for execution of this run. + /// + /// . public CreateRunRequest(string assistantId, CreateRunRequest request) - : this(assistantId, request?.Model, request?.Instructions, request?.Tools, request?.Metadata, request?.Temperature) + : this( + assistantId, + request?.Model, + request?.Instructions, + request?.AdditionalInstructions, + request?.AdditionalMessages, + request?.Tools, + request?.Metadata, + request?.Temperature, + request?.TopP, + request?.Stream ?? false, + request?.MaxPromptTokens, + request?.MaxCompletionTokens, + request?.TruncationStrategy, + request?.ToolChoice as string ?? ((Tool)request?.ToolChoice)?.Function?.Name, + request?.ParallelToolCalls, + request?.ResponseFormat ?? ChatResponseFormat.Auto) { } - public CreateRunRequest(string assistantId, string model = null, string instructions = null, IEnumerable tools = null, IReadOnlyDictionary metadata = null, double? temperature = null) + /// + /// Constructor. + /// + /// + /// The ID of the assistant used for execution of this run. + /// + /// + /// The model that the assistant used for this run. + /// + /// + /// The instructions that the assistant used for this run. + /// + /// + /// Appends additional instructions at the end of the instructions for the run. + /// This is useful for modifying the behavior on a per-run basis without overriding other instructions. + /// + /// + /// Adds additional messages to the thread before creating the run. + /// + /// + /// The list of tools that the assistant used for this run. + /// + /// + /// Set of 16 key-value pairs that can be attached to an object. + /// This can be useful for storing additional information about the object in a structured format. + /// Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. + /// + /// + /// What sampling temperature to use, between 0 and 2. 
Higher values like 0.8 will make the output + /// more random, while lower values like 0.2 will make it more focused and deterministic. + /// When null the default temperature (1) will be used. + /// + /// + /// An alternative to sampling with temperature, called nucleus sampling, + /// where the model considers the results of the tokens with top_p probability mass. + /// So 0.1 means only the tokens comprising the top 10% probability mass are considered. + /// We generally recommend altering this or temperature but not both. + /// + /// + /// If true, returns a stream of events that happen during the Run as server-sent events, + /// terminating when the Run enters a terminal state with a 'data: [DONE]' message. + /// + /// + /// The maximum number of prompt tokens that may be used over the course of the run. + /// The run will make a best effort to use only the number of prompt tokens specified, + /// across multiple turns of the run. If the run exceeds the number of prompt tokens specified, + /// the run will end with status 'incomplete'. See 'incomplete_details' for more info. + /// + /// + /// The maximum number of completion tokens that may be used over the course of the run. + /// The run will make a best effort to use only the number of completion tokens specified, + /// across multiple turns of the run. If the run exceeds the number of completion tokens specified, + /// the run will end with status 'incomplete'. See 'incomplete_details' for more info. + /// + /// + /// Controls for how a thread will be truncated prior to the run. + /// Use this to control the initial context window of the run. + /// + /// + /// Controls which (if any) tool is called by the model. + /// none means the model will not call any tools and instead generates a message. + /// auto is the default value and means the model can pick between generating a message or calling one or more tools. + /// required means the model must call one or more tools before responding to the user. 
+ /// Specifying a particular tool like {"type": "file_search"} or {"type": "function", "function": {"name": "my_function"}} + /// forces the model to call that tool. + /// + /// + /// Whether to enable parallel function calling during tool use. + /// + /// + /// An object specifying the format that the model must output. + /// Setting to enables JSON mode, + /// which guarantees the message the model generates is valid JSON.
+ /// Important: When using JSON mode you must still instruct the model to produce JSON yourself via some conversation message, + /// for example via your system message. If you don't do this, the model may generate an unending stream of + /// whitespace until the generation reaches the token limit, which may take a lot of time and give the appearance + /// of a "stuck" request. Also note that the message content may be partial (i.e. cut off) if finish_reason="length", + /// which indicates the generation exceeded max_tokens or the conversation exceeded the max context length. + /// + public CreateRunRequest( + string assistantId, + string model = null, + string instructions = null, + string additionalInstructions = null, + IEnumerable additionalMessages = null, + IEnumerable tools = null, + IReadOnlyDictionary metadata = null, + double? temperature = null, + double? topP = null, + bool stream = false, + int? maxPromptTokens = null, + int? maxCompletionTokens = null, + TruncationStrategy truncationStrategy = null, + string toolChoice = null, + bool? parallelToolCalls = null, + ChatResponseFormat responseFormat = ChatResponseFormat.Auto) { AssistantId = assistantId; Model = model; Instructions = instructions; - Tools = tools?.ToList(); + AdditionalInstructions = additionalInstructions; + AdditionalMessages = additionalMessages?.ToList(); + + var toolList = tools?.ToList(); + + if (toolList != null && toolList.Any()) + { + if (string.IsNullOrWhiteSpace(toolChoice)) + { + ToolChoice = "auto"; + } + else + { + if (!toolChoice.Equals("none") && + !toolChoice.Equals("required") && + !toolChoice.Equals("auto")) + { + var tool = toolList.FirstOrDefault(t => t.Function.Name.Contains(toolChoice)) ?? 
+ throw new ArgumentException($"The specified tool choice '{toolChoice}' was not found in the list of tools"); + ToolChoice = new { type = "function", function = new { name = tool.Function.Name } }; + } + else + { + ToolChoice = toolChoice; + } + } + } + + Tools = toolList?.ToList(); Metadata = metadata; Temperature = temperature; + TopP = topP; + Stream = stream; + MaxPromptTokens = maxPromptTokens; + MaxCompletionTokens = maxCompletionTokens; + TruncationStrategy = truncationStrategy; + ParallelToolCalls = parallelToolCalls; + ResponseFormat = responseFormat; } /// @@ -41,6 +199,19 @@ public CreateRunRequest(string assistantId, string model = null, string instruct [JsonPropertyName("instructions")] public string Instructions { get; } + /// + /// Appends additional instructions at the end of the instructions for the run. + /// This is useful for modifying the behavior on a per-run basis without overriding other instructions. + /// + [JsonPropertyName("additional_instructions")] + public string AdditionalInstructions { get; } + + /// + /// Adds additional messages to the thread before creating the run. + /// + [JsonPropertyName("additional_messages")] + public IReadOnlyList AdditionalMessages { get; } + /// /// The list of tools that the assistant used for this run. /// @@ -60,8 +231,87 @@ public CreateRunRequest(string assistantId, string model = null, string instruct /// more random, while lower values like 0.2 will make it more focused and deterministic. /// When null the default temperature (1) will be used. /// - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] [JsonPropertyName("temperature")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public double? Temperature { get; } + + /// + /// An alternative to sampling with temperature, called nucleus sampling, + /// where the model considers the results of the tokens with top_p probability mass. 
+ /// So 0.1 means only the tokens comprising the top 10% probability mass are considered. + /// We generally recommend altering this or temperature but not both. + /// + [JsonPropertyName("top_p")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public double? TopP { get; } + + /// + /// If true, returns a stream of events that happen during the Run as server-sent events, + /// terminating when the Run enters a terminal state with a 'data: [DONE]' message. + /// + [JsonPropertyName("stream")] + public bool Stream { get; internal set; } + + /// + /// The maximum number of prompt tokens that may be used over the course of the run. + /// The run will make a best effort to use only the number of prompt tokens specified, + /// across multiple turns of the run. If the run exceeds the number of prompt tokens specified, + /// the run will end with status 'incomplete'. See 'incomplete_details' for more info. + /// + [JsonPropertyName("max_prompt_tokens")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? MaxPromptTokens { get; } + + /// + /// The maximum number of completion tokens that may be used over the course of the run. + /// The run will make a best effort to use only the number of completion tokens specified, + /// across multiple turns of the run. If the run exceeds the number of completion tokens specified, + /// the run will end with status 'incomplete'. See 'incomplete_details' for more info. + /// + [JsonPropertyName("max_completion_tokens")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? MaxCompletionTokens { get; } + + /// + /// Controls for how a thread will be truncated prior to the run. + /// Use this to control the initial context window of the run. 
+ /// + [JsonPropertyName("truncation_strategy")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public TruncationStrategy TruncationStrategy { get; } + + /// + /// Controls which (if any) tool is called by the model. + /// none means the model will not call any tools and instead generates a message. + /// auto is the default value and means the model can pick between generating a message or calling one or more tools. + /// required means the model must call one or more tools before responding to the user. + /// Specifying a particular tool like {"type": "file_search"} or {"type": "function", "function": {"name": "my_function"}} + /// forces the model to call that tool. + /// + [JsonPropertyName("tool_choice")] + public dynamic ToolChoice { get; } + + /// + /// Whether to enable parallel function calling during tool use. + /// + [JsonPropertyName("parallel_tool_calls")] + public bool? ParallelToolCalls { get; } + + /// + /// An object specifying the format that the model must output. + /// Setting to enables JSON mode, + /// which guarantees the message the model generates is valid JSON. + /// + /// + /// Important: When using JSON mode you must still instruct the model to produce JSON yourself via some conversation message, + /// for example via your system message. If you don't do this, the model may generate an unending stream of + /// whitespace until the generation reaches the token limit, which may take a lot of time and give the appearance + /// of a "stuck" request. Also note that the message content may be partial (i.e. cut off) if finish_reason="length", + /// which indicates the generation exceeded max_tokens or the conversation exceeded the max context length. 
+ /// + [JsonPropertyName("response_format")] + [JsonConverter(typeof(ResponseFormatConverter))] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ChatResponseFormat ResponseFormat { get; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/CreateThreadAndRunRequest.cs b/OpenAI-DotNet/Threads/CreateThreadAndRunRequest.cs index b00d4354..87312dd1 100644 --- a/OpenAI-DotNet/Threads/CreateThreadAndRunRequest.cs +++ b/OpenAI-DotNet/Threads/CreateThreadAndRunRequest.cs @@ -1,14 +1,39 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. +using OpenAI.Extensions; +using System; using System.Collections.Generic; +using System.Linq; using System.Text.Json.Serialization; namespace OpenAI.Threads { public sealed class CreateThreadAndRunRequest { + /// + /// Constructor. + /// + /// + /// The ID of the assistant to use to execute this run. + /// + /// . public CreateThreadAndRunRequest(string assistantId, CreateThreadAndRunRequest request) - : this(assistantId, request?.Model, request?.Instructions, request?.Tools, request?.Metadata, request?.Temperature) + : this( + assistantId, + request?.Model, + request?.Instructions, + request?.Tools, + request?.ToolResources, + request?.Metadata, + request?.Temperature, + request?.TopP, + request?.Stream ?? false, + request?.MaxPromptTokens, + request?.MaxCompletionTokens, + request?.TruncationStrategy, + request?.ToolChoice as string ?? ((Tool)request?.ToolChoice)?.Function?.Name, + request?.ParallelToolCalls, + request?.ResponseFormat ?? ChatResponseFormat.Auto) { } @@ -31,6 +56,12 @@ public CreateThreadAndRunRequest(string assistantId, CreateThreadAndRunRequest r /// Override the tools the assistant can use for this run. /// This is useful for modifying the behavior on a per-run basis. /// + /// + /// A set of resources that are used by the assistant's tools. + /// The resources are specific to the type of tool. 
+ /// For example, the 'code_interpreter' tool requires a list of file IDs, + /// while the 'file_search' tool requires a list of vector store IDs. + /// /// /// Set of 16 key-value pairs that can be attached to an object. /// This can be useful for storing additional information about the object in a structured format. @@ -41,18 +72,115 @@ public CreateThreadAndRunRequest(string assistantId, CreateThreadAndRunRequest r /// more random, while lower values like 0.2 will make it more focused and deterministic. /// When null the default temperature (1) will be used. /// + /// + /// An alternative to sampling with temperature, called nucleus sampling, + /// where the model considers the results of the tokens with top_p probability mass. + /// So 0.1 means only the tokens comprising the top 10% probability mass are considered. + /// We generally recommend altering this or temperature but not both. + /// + /// + /// If true, returns a stream of events that happen during the Run as server-sent events, + /// terminating when the Run enters a terminal state with a 'data: [DONE]' message. + /// + /// + /// The maximum number of prompt tokens that may be used over the course of the run. + /// The run will make a best effort to use only the number of prompt tokens specified, + /// across multiple turns of the run. If the run exceeds the number of prompt tokens specified, + /// the run will end with status 'incomplete'. See 'incomplete_details' for more info. + /// + /// + /// The maximum number of completion tokens that may be used over the course of the run. + /// The run will make a best effort to use only the number of completion tokens specified, + /// across multiple turns of the run. If the run exceeds the number of completion tokens specified, + /// the run will end with status 'incomplete'. See 'incomplete_details' for more info. + /// + /// + /// Controls for how a thread will be truncated prior to the run. 
+ /// Use this to control the initial context window of the run. + /// + /// + /// Controls which (if any) tool is called by the model. + /// none means the model will not call any tools and instead generates a message. + /// auto is the default value and means the model can pick between generating a message or calling one or more tools. + /// required means the model must call one or more tools before responding to the user. + /// Specifying a particular tool like {"type": "file_search"} or {"type": "function", "function": {"name": "my_function"}} + /// forces the model to call that tool. + /// + /// + /// Whether to enable parallel function calling during tool use. + /// + /// + /// An object specifying the format that the model must output. + /// Setting to enables JSON mode, + /// which guarantees the message the model generates is valid JSON.
+ /// Important: When using JSON mode you must still instruct the model to produce JSON yourself via some conversation message, + /// for example via your system message. If you don't do this, the model may generate an unending stream of + /// whitespace until the generation reaches the token limit, which may take a lot of time and give the appearance + /// of a "stuck" request. Also note that the message content may be partial (i.e. cut off) if finish_reason="length", + /// which indicates the generation exceeded max_tokens or the conversation exceeded the max context length. + /// /// /// Optional, . /// - public CreateThreadAndRunRequest(string assistantId, string model = null, string instructions = null, IReadOnlyList tools = null, IReadOnlyDictionary metadata = null, double? temperature = null, CreateThreadRequest createThreadRequest = null) + public CreateThreadAndRunRequest( + string assistantId, + string model = null, + string instructions = null, + IReadOnlyList tools = null, + ToolResources toolResources = null, + IReadOnlyDictionary metadata = null, + double? temperature = null, + double? topP = null, + bool stream = false, + int? maxPromptTokens = null, + int? maxCompletionTokens = null, + TruncationStrategy truncationStrategy = null, + string toolChoice = null, + bool? parallelToolCalls = null, + ChatResponseFormat responseFormat = ChatResponseFormat.Auto, + CreateThreadRequest createThreadRequest = null) { AssistantId = assistantId; Model = model; Instructions = instructions; - Tools = tools; + + var toolList = tools?.ToList(); + + if (toolList != null && toolList.Any()) + { + if (string.IsNullOrWhiteSpace(toolChoice)) + { + ToolChoice = "auto"; + } + else + { + if (!toolChoice.Equals("none") && + !toolChoice.Equals("required") && + !toolChoice.Equals("auto")) + { + var tool = toolList.FirstOrDefault(t => t.Function.Name.Contains(toolChoice)) ?? 
+ throw new ArgumentException($"The specified tool choice '{toolChoice}' was not found in the list of tools"); + ToolChoice = new { type = "function", function = new { name = tool.Function.Name } }; + } + else + { + ToolChoice = toolChoice; + } + } + } + + Tools = toolList?.ToList(); + ToolResources = toolResources; Metadata = metadata; - ThreadRequest = createThreadRequest; Temperature = temperature; + TopP = topP; + Stream = stream; + MaxPromptTokens = maxPromptTokens; + MaxCompletionTokens = maxCompletionTokens; + TruncationStrategy = truncationStrategy; + ResponseFormat = responseFormat; + ParallelToolCalls = parallelToolCalls; + ThreadRequest = createThreadRequest; } /// @@ -77,12 +205,20 @@ public CreateThreadAndRunRequest(string assistantId, string model = null, string public string Instructions { get; } /// - /// Override the tools the assistant can use for this run. - /// This is useful for modifying the behavior on a per-run basis. + /// The list of tools that the assistant used for this run. /// [JsonPropertyName("tools")] public IReadOnlyList Tools { get; } + /// + /// A set of resources that are used by the assistant's tools. + /// The resources are specific to the type of tool. + /// For example, the 'code_interpreter' tool requires a list of file IDs, + /// while the 'file_search' tool requires a list of vector store IDs. + /// + [JsonPropertyName("tool_resources")] + public ToolResources ToolResources { get; } + /// /// Set of 16 key-value pairs that can be attached to an object. /// This can be useful for storing additional information about the object in a structured format. @@ -96,11 +232,93 @@ public CreateThreadAndRunRequest(string assistantId, string model = null, string /// more random, while lower values like 0.2 will make it more focused and deterministic. /// When null the default temperature (1) will be used. 
/// - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] [JsonPropertyName("temperature")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public double? Temperature { get; } + /// + /// An alternative to sampling with temperature, called nucleus sampling, + /// where the model considers the results of the tokens with top_p probability mass. + /// So 0.1 means only the tokens comprising the top 10% probability mass are considered. + /// We generally recommend altering this or temperature but not both. + /// + [JsonPropertyName("top_p")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public double? TopP { get; } + + /// + /// If true, returns a stream of events that happen during the Run as server-sent events, + /// terminating when the Run enters a terminal state with a 'data: [DONE]' message. + /// + [JsonPropertyName("stream")] + public bool Stream { get; internal set; } + + /// + /// The maximum number of prompt tokens that may be used over the course of the run. + /// The run will make a best effort to use only the number of prompt tokens specified, + /// across multiple turns of the run. If the run exceeds the number of prompt tokens specified, + /// the run will end with status 'incomplete'. See 'incomplete_details' for more info. + /// + [JsonPropertyName("max_prompt_tokens")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? MaxPromptTokens { get; } + + /// + /// The maximum number of completion tokens that may be used over the course of the run. + /// The run will make a best effort to use only the number of completion tokens specified, + /// across multiple turns of the run. If the run exceeds the number of completion tokens specified, + /// the run will end with status 'incomplete'. See 'incomplete_details' for more info. + /// + [JsonPropertyName("max_completion_tokens")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? 
MaxCompletionTokens { get; } + + /// + /// Controls for how a thread will be truncated prior to the run. + /// Use this to control the initial context window of the run. + /// + [JsonPropertyName("truncation_strategy")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public TruncationStrategy TruncationStrategy { get; } + + /// + /// Controls which (if any) tool is called by the model. + /// none means the model will not call any tools and instead generates a message. + /// auto is the default value and means the model can pick between generating a message or calling one or more tools. + /// required means the model must call one or more tools before responding to the user. + /// Specifying a particular tool like {"type": "file_search"} or {"type": "function", "function": {"name": "my_function"}} + /// forces the model to call that tool. + /// + [JsonPropertyName("tool_choice")] + public dynamic ToolChoice { get; } + + /// + /// Whether to enable parallel function calling during tool use. + /// + [JsonPropertyName("parallel_tool_calls")] + public bool? ParallelToolCalls { get; } + + /// + /// An object specifying the format that the model must output. + /// Setting to enables JSON mode, + /// which guarantees the message the model generates is valid JSON. + /// + /// + /// Important: When using JSON mode you must still instruct the model to produce JSON yourself via some conversation message, + /// for example via your system message. If you don't do this, the model may generate an unending stream of + /// whitespace until the generation reaches the token limit, which may take a lot of time and give the appearance + /// of a "stuck" request. Also note that the message content may be partial (i.e. cut off) if finish_reason="length", + /// which indicates the generation exceeded max_tokens or the conversation exceeded the max context length. 
+ /// + [JsonPropertyName("response_format")] + [JsonConverter(typeof(ResponseFormatConverter))] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ChatResponseFormat ResponseFormat { get; } + + /// + /// The optional options to use. + /// [JsonPropertyName("thread")] public CreateThreadRequest ThreadRequest { get; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/CreateThreadRequest.cs b/OpenAI-DotNet/Threads/CreateThreadRequest.cs index 8f1568cf..be12ba5e 100644 --- a/OpenAI-DotNet/Threads/CreateThreadRequest.cs +++ b/OpenAI-DotNet/Threads/CreateThreadRequest.cs @@ -14,23 +14,43 @@ public sealed class CreateThreadRequest /// /// A list of messages to start the thread with. /// + /// + /// A set of resources that are made available to the assistant's tools in this thread. + /// The resources are specific to the type of tool. + /// For example, the code_interpreter tool requires a list of file IDs, + /// while the file_search tool requires a list of vector store IDs. + /// /// /// Set of 16 key-value pairs that can be attached to an object. /// This can be useful for storing additional information about the object in a structured format. /// Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. /// - public CreateThreadRequest(IEnumerable messages = null, IReadOnlyDictionary metadata = null) + public CreateThreadRequest(IEnumerable messages = null, ToolResources toolResources = null, IReadOnlyDictionary metadata = null) { Messages = messages?.ToList(); + ToolResources = toolResources; Metadata = metadata; } + public CreateThreadRequest(string message) : this(new[] { new Message(message) }) + { + } + /// /// A list of messages to start the thread with. /// [JsonPropertyName("messages")] public IReadOnlyList Messages { get; } + /// + /// A set of resources that are made available to the assistant's tools in this thread. + /// The resources are specific to the type of tool. 
+ /// For example, the code_interpreter tool requires a list of file IDs, + /// while the file_search tool requires a list of vector store IDs. + /// + [JsonPropertyName("tool_resources")] + public ToolResources ToolResources { get; } + /// /// Set of 16 key-value pairs that can be attached to an object. /// This can be useful for storing additional information about the object in a structured format. @@ -39,6 +59,6 @@ public CreateThreadRequest(IEnumerable messages = null, IReadOnlyDictio [JsonPropertyName("metadata")] public IReadOnlyDictionary Metadata { get; } - public static implicit operator CreateThreadRequest(string message) => new(new[] { new Message(message) }); + public static implicit operator CreateThreadRequest(string message) => new(message); } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/FunctionCall.cs b/OpenAI-DotNet/Threads/FunctionCall.cs index 6dbaa915..1c2029ef 100644 --- a/OpenAI-DotNet/Threads/FunctionCall.cs +++ b/OpenAI-DotNet/Threads/FunctionCall.cs @@ -26,5 +26,25 @@ public sealed class FunctionCall [JsonInclude] [JsonPropertyName("output")] public string Output { get; private set; } + + internal void AppendFrom(FunctionCall other) + { + if (other == null) { return; } + + if (!string.IsNullOrWhiteSpace(other.Name)) + { + Name += other.Name; + } + + if (!string.IsNullOrWhiteSpace(other.Arguments)) + { + Arguments += other.Arguments; + } + + if (!string.IsNullOrWhiteSpace(other.Output)) + { + Output += other.Output; + } + } } } diff --git a/OpenAI-DotNet/Threads/IncompleteDetails.cs b/OpenAI-DotNet/Threads/IncompleteDetails.cs new file mode 100644 index 00000000..e85dc41c --- /dev/null +++ b/OpenAI-DotNet/Threads/IncompleteDetails.cs @@ -0,0 +1,13 @@ +using OpenAI.Extensions; +using System.Text.Json.Serialization; + +namespace OpenAI.Threads +{ + public sealed class IncompleteDetails + { + [JsonInclude] + [JsonPropertyName("reason")] + [JsonConverter(typeof(JsonStringEnumConverter<IncompleteMessageReason>))] + public IncompleteMessageReason 
Reason { get; private set; } + } +} diff --git a/OpenAI-DotNet/Threads/IncompleteMessageReason.cs b/OpenAI-DotNet/Threads/IncompleteMessageReason.cs new file mode 100644 index 00000000..946a51b9 --- /dev/null +++ b/OpenAI-DotNet/Threads/IncompleteMessageReason.cs @@ -0,0 +1,23 @@ +using System.Runtime.Serialization; + +namespace OpenAI.Threads +{ + public enum IncompleteMessageReason + { + None = 0, + [EnumMember(Value = "content_filter")] + ContentFilter, + [EnumMember(Value = "max_tokens")] + MaxTokens, + [EnumMember(Value = "max_completion_tokens")] + MaxCompletionTokens, + [EnumMember(Value = "max_prompt_tokens")] + MaxPromptTokens, + [EnumMember(Value = "run_cancelled")] + RunCancelled, + [EnumMember(Value = "run_expired")] + RunExpired, + [EnumMember(Value = "run_failed")] + RunFailed + } +} diff --git a/OpenAI-DotNet/Threads/Message.cs b/OpenAI-DotNet/Threads/Message.cs index f2e539c1..de2d3a67 100644 --- a/OpenAI-DotNet/Threads/Message.cs +++ b/OpenAI-DotNet/Threads/Message.cs @@ -14,45 +14,78 @@ public sealed class Message /// Constructor. /// /// - /// The content of the message. + /// The contents of the message. + /// + /// + /// The role of the entity that is creating the message. + /// + /// + /// A list of files attached to the message, and the tools they were added to. + /// + /// + /// Set of 16 key-value pairs that can be attached to an object. + /// This can be useful for storing additional information about the object in a structured format. + /// Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. + /// + public Message( + string content, + Role role = Role.User, + IEnumerable attachments = null, + IReadOnlyDictionary metadata = null) + : this(new List { new(content) }, role, attachments, metadata) + { + } + + /// + /// Constructor. + /// + /// + /// The contents of the message. + /// + /// + /// The role of the entity that is creating the message. 
+ /// + /// + /// A list of files attached to the message, and the tools they were added to. /// - /// - /// A list of File IDs that the message should use. - /// There can be a maximum of 10 files attached to a message. - /// Useful for tools like 'retrieval' and 'code_interpreter' that can access and use files. /// /// Set of 16 key-value pairs that can be attached to an object. /// This can be useful for storing additional information about the object in a structured format. /// Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. /// - public Message(string content, IEnumerable fileIds = null, IReadOnlyDictionary metadata = null) + public Message( + IEnumerable content, + Role role = Role.User, + IEnumerable attachments = null, + IReadOnlyDictionary metadata = null) { - Role = Role.User; - Content = content; - FileIds = fileIds?.ToList(); + Content = content?.ToList(); + Role = role; + Attachments = attachments?.ToList(); Metadata = metadata; } /// /// The role of the entity that is creating the message. - /// Currently only user is supported. /// [JsonPropertyName("role")] public Role Role { get; } /// - /// The content of the message. + /// The contents of the message. /// + [JsonInclude] [JsonPropertyName("content")] - public string Content { get; } + [JsonIgnore(Condition = JsonIgnoreCondition.Never)] + public IReadOnlyList Content { get; private set; } /// - /// A list of File IDs that the message should use. - /// There can be a maximum of 10 files attached to a message. - /// Useful for tools like 'retrieval' and 'code_interpreter' that can access and use files. + /// A list of files attached to the message, and the tools they were added to. 
/// - [JsonPropertyName("file_ids")] - public IReadOnlyList FileIds { get; } + [JsonInclude] + [JsonPropertyName("attachments")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public IReadOnlyList Attachments { get; private set; } /// /// Set of 16 key-value pairs that can be attached to an object. @@ -60,6 +93,14 @@ public Message(string content, IEnumerable fileIds = null, IReadOnlyDict /// Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. /// [JsonPropertyName("metadata")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public IReadOnlyDictionary Metadata { get; } + + /// + /// Formats all of the items into a single string, + /// putting each item on a new line. + /// + /// of all . + public string PrintContent() => string.Join("\n", Content.Select(content => content?.ToString())); } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/MessageDelta.cs b/OpenAI-DotNet/Threads/MessageDelta.cs new file mode 100644 index 00000000..0f280608 --- /dev/null +++ b/OpenAI-DotNet/Threads/MessageDelta.cs @@ -0,0 +1,18 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace OpenAI.Threads +{ + public sealed class MessageDelta + { + [JsonInclude] + [JsonPropertyName("role")] + public Role Role { get; private set; } + + [JsonInclude] + [JsonPropertyName("content")] + public IReadOnlyList Content { get; private set; } + } +} diff --git a/OpenAI-DotNet/Threads/MessageFileResponse.cs b/OpenAI-DotNet/Threads/MessageFileResponse.cs index ad483d8b..f29535dc 100644 --- a/OpenAI-DotNet/Threads/MessageFileResponse.cs +++ b/OpenAI-DotNet/Threads/MessageFileResponse.cs @@ -5,6 +5,7 @@ namespace OpenAI.Threads { + [Obsolete("Removed. 
Use Assistant.ToolResources instead.")] public sealed class MessageFileResponse : BaseResponse { /// @@ -42,4 +43,4 @@ public sealed class MessageFileResponse : BaseResponse public override string ToString() => Id; } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/MessageResponse.cs b/OpenAI-DotNet/Threads/MessageResponse.cs index 503133de..9ea193f7 100644 --- a/OpenAI-DotNet/Threads/MessageResponse.cs +++ b/OpenAI-DotNet/Threads/MessageResponse.cs @@ -1,5 +1,6 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. +using OpenAI.Extensions; using System; using System.Collections.Generic; using System.Linq; @@ -12,8 +13,12 @@ namespace OpenAI.Threads /// Messages can include text, images, and other files. /// Messages stored as a list on the Thread. /// - public sealed class MessageResponse : BaseResponse + public sealed class MessageResponse : BaseResponse, IServerSentEvent { + public MessageResponse() { } + + internal MessageResponse(MessageResponse other) => AppendFrom(other); + /// /// The identifier, which can be referenced in API endpoints. /// @@ -28,6 +33,11 @@ public sealed class MessageResponse : BaseResponse [JsonPropertyName("object")] public string Object { get; private set; } + [JsonInclude] + [JsonPropertyName("delta")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public MessageDelta Delta { get; private set; } + /// /// The Unix timestamp (in seconds) for when the message was created. /// @@ -45,6 +55,50 @@ public sealed class MessageResponse : BaseResponse [JsonPropertyName("thread_id")] public string ThreadId { get; private set; } + /// + /// The status of the message, which can be either 'in_progress', 'incomplete', or 'completed'. 
+ /// + [JsonInclude] + [JsonPropertyName("status")] + [JsonConverter(typeof(JsonStringEnumConverter<MessageStatus>))] + public MessageStatus Status { get; private set; } + + /// + /// On an incomplete message, details about why the message is incomplete. + /// + [JsonInclude] + [JsonPropertyName("incomplete_details")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public IncompleteDetails IncompleteDetails { get; private set; } + + /// + /// The Unix timestamp (in seconds) for when the message was completed. + /// + [JsonInclude] + [JsonPropertyName("completed_at")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public int? CompletedAtUnixTimeSeconds { get; private set; } + + [JsonIgnore] + public DateTime? CompletedAt + => CompletedAtUnixTimeSeconds.HasValue + ? DateTimeOffset.FromUnixTimeSeconds(CompletedAtUnixTimeSeconds.Value).DateTime + : null; + + /// + /// The Unix timestamp (in seconds) for when the message was marked as incomplete. + /// + [JsonInclude] + [JsonPropertyName("incomplete_at")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public int? IncompleteAtUnixTimeSeconds { get; private set; } + + [JsonIgnore] + public DateTime? IncompleteAt + => IncompleteAtUnixTimeSeconds.HasValue + ? DateTimeOffset.FromUnixTimeSeconds(IncompleteAtUnixTimeSeconds.Value).DateTime + : null; + /// /// The entity that produced the message. One of user or assistant. /// @@ -52,12 +106,18 @@ public sealed class MessageResponse : BaseResponse [JsonPropertyName("role")] public Role Role { get; private set; } + private List content = new(); + /// /// The content of the message in array of text and/or images. /// [JsonInclude] [JsonPropertyName("content")] - public IReadOnlyList Content { get; private set; } + public IReadOnlyList Content + { + get => content; + private set => content = value?.ToList(); + } /// /// If applicable, the ID of the assistant that authored this message. 
@@ -78,9 +138,17 @@ public sealed class MessageResponse : BaseResponse /// Useful for tools like 'retrieval' and 'code_interpreter' that can access files. /// A maximum of 10 files can be attached to a message. /// + [JsonIgnore] + [Obsolete("Use Attachments instead.")] + public IReadOnlyList FileIds => Attachments?.Select(attachment => attachment.FileId).ToList(); + + /// + /// A list of files attached to the message, and the tools they were added to. + /// [JsonInclude] - [JsonPropertyName("file_ids")] - public IReadOnlyList FileIds { get; private set; } + [JsonPropertyName("attachments")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public IReadOnlyList Attachments { get; private set; } /// /// Set of 16 key-value pairs that can be attached to an object. @@ -89,10 +157,14 @@ public sealed class MessageResponse : BaseResponse /// [JsonInclude] [JsonPropertyName("metadata")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public IReadOnlyDictionary Metadata { get; private set; } public static implicit operator string(MessageResponse message) => message?.ToString(); + public static implicit operator Message(MessageResponse response) + => new(response.Content, response.Role, response.Attachments, response.Metadata); + public override string ToString() => Id; /// @@ -100,6 +172,79 @@ public sealed class MessageResponse : BaseResponse /// putting each item on a new line. /// /// of all . - public string PrintContent() => string.Join("\n", Content.Select(content => content?.ToString())); + public string PrintContent() + => content == null + ? 
string.Empty + : string.Join("\n", content.Select(c => c?.ToString())); + + internal void AppendFrom(MessageResponse other) + { + if (other == null) { return; } + + if (other.Delta != null) + { + if (Role == 0 && + other.Delta.Role > 0) + { + Role = other.Delta.Role; + } + + if (other.Delta.Content != null) + { + content ??= new List(); + content.AppendFrom(other.Delta.Content); + } + + // bail early since we only care about the delta content + return; + } + + if (Role == 0 && + other.Role > 0) + { + Role = other.Role; + } + + if (other.content != null) + { + content = other.content; + } + + if (CreatedAtUnixTimeSeconds == 0 && + other.CreatedAtUnixTimeSeconds > 0) + { + CreatedAtUnixTimeSeconds = other.CreatedAtUnixTimeSeconds; + } + + if (other.CompletedAtUnixTimeSeconds.HasValue) + { + CompletedAtUnixTimeSeconds = other.CompletedAtUnixTimeSeconds; + } + + if (other.IncompleteAtUnixTimeSeconds.HasValue) + { + IncompleteAtUnixTimeSeconds = other.IncompleteAtUnixTimeSeconds; + } + + if (other.Status > 0) + { + Status = other.Status; + } + + if (other.IncompleteDetails != null) + { + IncompleteDetails = other.IncompleteDetails; + } + + if (other.Attachments != null) + { + Attachments = other.Attachments; + } + + if (other.Metadata != null) + { + Metadata = other.Metadata; + } + } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/MessageStatus.cs b/OpenAI-DotNet/Threads/MessageStatus.cs new file mode 100644 index 00000000..07f23339 --- /dev/null +++ b/OpenAI-DotNet/Threads/MessageStatus.cs @@ -0,0 +1,17 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. 
+ +using System.Runtime.Serialization; + +namespace OpenAI.Threads +{ + public enum MessageStatus + { + NotStarted = 0, + [EnumMember(Value = "in_progress")] + InProgress, + [EnumMember(Value = "incomplete")] + Incomplete, + [EnumMember(Value = "completed")] + Completed + } +} diff --git a/OpenAI-DotNet/Threads/RequiredAction.cs b/OpenAI-DotNet/Threads/RequiredAction.cs index 5fb5c376..455463af 100644 --- a/OpenAI-DotNet/Threads/RequiredAction.cs +++ b/OpenAI-DotNet/Threads/RequiredAction.cs @@ -17,4 +17,4 @@ public sealed class RequiredAction [JsonPropertyName("submit_tool_outputs")] public SubmitToolOutputs SubmitToolOutputs { get; private set; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/RunResponse.cs b/OpenAI-DotNet/Threads/RunResponse.cs index e8f82d0d..2a6c641b 100644 --- a/OpenAI-DotNet/Threads/RunResponse.cs +++ b/OpenAI-DotNet/Threads/RunResponse.cs @@ -3,6 +3,7 @@ using OpenAI.Extensions; using System; using System.Collections.Generic; +using System.Linq; using System.Text.Json.Serialization; namespace OpenAI.Threads @@ -12,8 +13,12 @@ namespace OpenAI.Threads /// The Assistant uses it's configuration and the Thread's Messages to perform tasks by calling models and tools. /// As part of a Run, the Assistant appends Messages to the Thread. /// - public sealed class RunResponse : BaseResponse + public sealed class RunResponse : BaseResponse, IServerSentEvent { + public RunResponse() { } + + internal RunResponse(RunResponse other) => AppendFrom(other); + /// /// The identifier, which can be referenced in API endpoints. /// @@ -28,6 +33,16 @@ public sealed class RunResponse : BaseResponse [JsonPropertyName("object")] public string Object { get; private set; } + /// + /// The Unix timestamp (in seconds) for when the thread was created. 
+ /// + [JsonInclude] + [JsonPropertyName("created_at")] + public int CreatedAtUnixTimeSeconds { get; private set; } + + [JsonIgnore] + public DateTime CreatedAt => DateTimeOffset.FromUnixTimeSeconds(CreatedAtUnixTimeSeconds).DateTime; + /// /// The thread ID that this run belongs to. /// @@ -56,6 +71,7 @@ public sealed class RunResponse : BaseResponse ///
[JsonInclude] [JsonPropertyName("required_action")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public RequiredAction RequiredAction { get; private set; } /// @@ -64,23 +80,15 @@ public sealed class RunResponse : BaseResponse /// [JsonInclude] [JsonPropertyName("last_error")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public Error LastError { get; private set; } - /// - /// The Unix timestamp (in seconds) for when the thread was created. - /// - [JsonInclude] - [JsonPropertyName("created_at")] - public int CreatedAtUnixTimeSeconds { get; private set; } - - [JsonIgnore] - public DateTime CreatedAt => DateTimeOffset.FromUnixTimeSeconds(CreatedAtUnixTimeSeconds).DateTime; - /// /// The Unix timestamp (in seconds) for when the run will expire. /// [JsonInclude] [JsonPropertyName("expires_at")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public int? ExpiresAtUnixTimeSeconds { get; private set; } [JsonIgnore] @@ -94,6 +102,7 @@ public DateTime? ExpiresAt ///
[JsonInclude] [JsonPropertyName("started_at")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public int? StartedAtUnixTimeSeconds { get; private set; } [JsonIgnore] @@ -107,6 +116,7 @@ public DateTime? StartedAt ///
[JsonInclude] [JsonPropertyName("cancelled_at")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public int? CancelledAtUnixTimeSeconds { get; private set; } [JsonIgnore] @@ -120,6 +130,7 @@ public DateTime? CancelledAt ///
[JsonInclude] [JsonPropertyName("failed_at")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public int? FailedAtUnixTimeSeconds { get; private set; } [JsonIgnore] @@ -133,6 +144,7 @@ public DateTime? FailedAt ///
[JsonInclude] [JsonPropertyName("completed_at")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public int? CompletedAtUnixTimeSeconds { get; private set; } [JsonIgnore] @@ -141,6 +153,11 @@ public DateTime? CompletedAt ? DateTimeOffset.FromUnixTimeSeconds(CompletedAtUnixTimeSeconds.Value).DateTime : null; + [JsonInclude] + [JsonPropertyName("incomplete_details")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public IncompleteDetails IncompleteDetails { get; private set; } + /// /// The model that the assistant used for this run. /// @@ -155,19 +172,26 @@ public DateTime? CompletedAt [JsonPropertyName("instructions")] public string Instructions { get; private set; } + private List tools; + /// /// The list of tools that the assistant used for this run. /// [JsonInclude] [JsonPropertyName("tools")] - public IReadOnlyList Tools { get; private set; } + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public IReadOnlyList Tools + { + get => tools; + private set => tools = value?.ToList(); + } /// /// The list of File IDs the assistant used for this run. /// - [JsonInclude] - [JsonPropertyName("file_ids")] - public IReadOnlyList FileIds { get; private set; } + [JsonIgnore] + [Obsolete("Removed")] + public IReadOnlyList FileIds => null; /// /// Set of 16 key-value pairs that can be attached to an object. @@ -176,6 +200,7 @@ public DateTime? CompletedAt /// [JsonInclude] [JsonPropertyName("metadata")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public IReadOnlyDictionary Metadata { get; private set; } /// @@ -183,10 +208,185 @@ public DateTime? CompletedAt /// [JsonInclude] [JsonPropertyName("usage")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public Usage Usage { get; private set; } + /// + /// The sampling temperature used for this run. If not set, defaults to 1. 
+ /// + [JsonInclude] + [JsonPropertyName("temperature")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public double? Temperature { get; private set; } + + /// + /// The nucleus sampling value used for this run. If not set, defaults to 1. + /// + [JsonInclude] + [JsonPropertyName("top_p")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public double? TopP { get; private set; } + + /// + /// The maximum number of prompt tokens specified to have been used over the course of the run. + /// + [JsonInclude] + [JsonPropertyName("max_prompt_tokens")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public int? MaxPromptTokens { get; private set; } + + /// + /// The maximum number of completion tokens specified to have been used over the course of the run. + /// + [JsonInclude] + [JsonPropertyName("max_completion_tokens")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public int? MaxCompletionTokens { get; private set; } + + /// + /// Controls for how a thread will be truncated prior to the run. Use this to control the initial context window of the run. + /// + [JsonInclude] + [JsonPropertyName("truncation_strategy")] + public TruncationStrategy TruncationStrategy { get; private set; } + + /// + /// Controls which (if any) tool is called by the model. + /// none means the model will not call any tools and instead generates a message. + /// auto is the default value and means the model can pick between generating a message or calling one or more tools. + /// required means the model must call one or more tools before responding to the user. + /// Specifying a particular tool like {"type": "file_search"} or {"type": "function", "function": {"name": "my_function"}} + /// forces the model to call that tool. 
+ /// + [JsonInclude] + [JsonPropertyName("tool_choice")] + public dynamic ToolChoice { get; private set; } + + [JsonInclude] + [JsonPropertyName("parallel_tool_calls")] + public bool ParallelToolCalls { get; private set; } + + /// + /// Specifies the format that the model must output. + /// Setting to enables JSON mode, + /// which guarantees the message the model generates is valid JSON. + /// + /// + /// Important: When using JSON mode you must still instruct the model to produce JSON yourself via some conversation message, + /// for example via your system message. If you don't do this, the model may generate an unending stream of + /// whitespace until the generation reaches the token limit, which may take a lot of time and give the appearance + /// of a "stuck" request. Also note that the message content may be partial (i.e. cut off) if finish_reason="length", + /// which indicates the generation exceeded max_tokens or the conversation exceeded the max context length. + /// + [JsonInclude] + [JsonPropertyName("response_format")] + [JsonConverter(typeof(ResponseFormatConverter))] + public ChatResponseFormat ResponseFormat { get; private set; } + public static implicit operator string(RunResponse run) => run?.ToString(); public override string ToString() => Id; + + internal void AppendFrom(RunResponse other) + { + if (other is null) { return; } + + if (other.Status > 0) + { + Status = other.Status; + } + + if (other.RequiredAction != null) + { + RequiredAction = other.RequiredAction; + } + + if (other.LastError != null) + { + LastError = other.LastError; + } + + if (other.ExpiresAtUnixTimeSeconds.HasValue) + { + ExpiresAtUnixTimeSeconds = other.ExpiresAtUnixTimeSeconds; + } + + if (other.StartedAtUnixTimeSeconds.HasValue) + { + StartedAtUnixTimeSeconds = other.StartedAtUnixTimeSeconds; + } + + if (other.CancelledAtUnixTimeSeconds.HasValue) + { + CancelledAtUnixTimeSeconds = other.CancelledAtUnixTimeSeconds; + } + + if (other.FailedAtUnixTimeSeconds.HasValue) + 
{ + FailedAtUnixTimeSeconds = other.FailedAtUnixTimeSeconds; + } + + if (other.CompletedAtUnixTimeSeconds.HasValue) + { + CompletedAtUnixTimeSeconds = other.CompletedAtUnixTimeSeconds; + } + + if (other.IncompleteDetails != null) + { + IncompleteDetails = other.IncompleteDetails; + } + + if (other is { Tools: not null }) + { + tools ??= new List(); + tools.AppendFrom(other.Tools); + } + + if (other.Metadata is { Count: > 0 }) + { + Metadata = other.Metadata; + } + + if (other.Usage != null) + { + Usage = other.Usage; + } + + if (other.Temperature.HasValue) + { + Temperature = other.Temperature; + } + + if (other.TopP.HasValue) + { + TopP = other.TopP; + } + + if (other.MaxPromptTokens.HasValue) + { + MaxPromptTokens = other.MaxPromptTokens; + } + + if (other.MaxCompletionTokens.HasValue) + { + MaxCompletionTokens = other.MaxCompletionTokens; + } + + if (other.TruncationStrategy != null) + { + TruncationStrategy = other.TruncationStrategy; + } + + if (other.ToolChoice is string stringToolChoice) + { + ToolChoice = stringToolChoice; + } + else + { + ToolChoice = other.ToolChoice; + } + + ResponseFormat = other.ResponseFormat; + } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/RunStatus.cs b/OpenAI-DotNet/Threads/RunStatus.cs index 0baf79b8..d7cd5063 100644 --- a/OpenAI-DotNet/Threads/RunStatus.cs +++ b/OpenAI-DotNet/Threads/RunStatus.cs @@ -12,15 +12,17 @@ public enum RunStatus InProgress, [EnumMember(Value = "requires_action")] RequiresAction, + [EnumMember(Value = "incomplete")] + Incomplete, [EnumMember(Value = "cancelling")] Cancelling, [EnumMember(Value = "cancelled")] Cancelled, - [EnumMember(Value = "failed")] - Failed, [EnumMember(Value = "completed")] Completed, + [EnumMember(Value = "failed")] + Failed, [EnumMember(Value = "expired")] Expired } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/RunStepDelta.cs b/OpenAI-DotNet/Threads/RunStepDelta.cs new file mode 100644 index 00000000..4da6a23d --- /dev/null +++ 
b/OpenAI-DotNet/Threads/RunStepDelta.cs @@ -0,0 +1,14 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System.Text.Json.Serialization; + +namespace OpenAI.Threads +{ + public class RunStepDelta + { + [JsonInclude] + [JsonPropertyName("step_details")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public StepDetails StepDetails { get; private set; } + } +} diff --git a/OpenAI-DotNet/Threads/RunStepMessageCreation.cs b/OpenAI-DotNet/Threads/RunStepMessageCreation.cs index eb3dc75d..2a594ee0 100644 --- a/OpenAI-DotNet/Threads/RunStepMessageCreation.cs +++ b/OpenAI-DotNet/Threads/RunStepMessageCreation.cs @@ -12,5 +12,13 @@ public sealed class RunStepMessageCreation [JsonInclude] [JsonPropertyName("message_id")] public string MessageId { get; private set; } + + internal void AppendFrom(RunStepMessageCreation other) + { + if (!string.IsNullOrWhiteSpace(other.MessageId)) + { + MessageId = other.MessageId; + } + } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/RunStepResponse.cs b/OpenAI-DotNet/Threads/RunStepResponse.cs index 820b74db..d502f7dd 100644 --- a/OpenAI-DotNet/Threads/RunStepResponse.cs +++ b/OpenAI-DotNet/Threads/RunStepResponse.cs @@ -12,8 +12,12 @@ namespace OpenAI.Threads /// An Assistant can call tools or create Messages during it's run. /// Examining Run Steps allows you to introspect how the Assistant is getting to it's final results. ///
- public sealed class RunStepResponse : BaseResponse + public sealed class RunStepResponse : BaseResponse, IServerSentEvent { + public RunStepResponse() { } + + internal RunStepResponse(RunStepResponse other) => AppendFrom(other); + /// /// The identifier of the run step, which can be referenced in API endpoints. /// @@ -24,6 +28,26 @@ public sealed class RunStepResponse : BaseResponse [JsonInclude] [JsonPropertyName("object")] public string Object { get; private set; } + + [JsonInclude] + [JsonPropertyName("delta")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public RunStepDelta Delta { get; } + + /// + /// The Unix timestamp (in seconds) for when the run step was created. + /// + [JsonInclude] + [JsonPropertyName("created_at")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public int? CreatedAtUnixTimeSeconds { get; private set; } + + [JsonIgnore] + public DateTime? CreatedAt + => CreatedAtUnixTimeSeconds.HasValue + ? DateTimeOffset.FromUnixTimeSeconds(CreatedAtUnixTimeSeconds.Value).DateTime + : null; + /// /// The ID of the assistant associated with the run step. /// @@ -66,6 +90,7 @@ public sealed class RunStepResponse : BaseResponse /// [JsonInclude] [JsonPropertyName("step_details")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public StepDetails StepDetails { get; private set; } /// @@ -73,39 +98,37 @@ public sealed class RunStepResponse : BaseResponse /// [JsonInclude] [JsonPropertyName("last_error")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public Error LastError { get; private set; } /// - /// The Unix timestamp (in seconds) for when the run step was created. + /// The Unix timestamp (in seconds) for when the run step expired. A step is considered expired if the parent run is expired. /// [JsonInclude] - [JsonPropertyName("created_at")] - public int? 
CreatedAtUnixTimeSeconds { get; private set; } + [JsonPropertyName("expired_at")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public int? ExpiredAtUnixTimeSeconds { get; private set; } [JsonIgnore] - public DateTime? CreatedAt - => CreatedAtUnixTimeSeconds.HasValue - ? DateTimeOffset.FromUnixTimeSeconds(CreatedAtUnixTimeSeconds.Value).DateTime - : null; - - /// - /// The Unix timestamp (in seconds) for when the run step expired. A step is considered expired if the parent run is expired. - /// - [JsonInclude] - [JsonPropertyName("expires_at")] - public int? ExpiresAtUnixTimeSeconds { get; private set; } + [Obsolete("use ExpiredAtUnixTimeSeconds")] + public int? ExpiresAtUnitTimeSeconds => ExpiredAtUnixTimeSeconds; [JsonIgnore] - public DateTime? ExpiresAt - => ExpiresAtUnixTimeSeconds.HasValue - ? DateTimeOffset.FromUnixTimeSeconds(ExpiresAtUnixTimeSeconds.Value).DateTime + public DateTime? ExpiredAt + => ExpiredAtUnixTimeSeconds.HasValue + ? DateTimeOffset.FromUnixTimeSeconds(ExpiredAtUnixTimeSeconds.Value).DateTime : null; + [JsonIgnore] + [Obsolete("Use ExpiredAt")] + public DateTime? ExpiresAt => ExpiredAt; + /// /// The Unix timestamp (in seconds) for when the run step was cancelled. /// [JsonInclude] [JsonPropertyName("cancelled_at")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public int? CancelledAtUnixTimeSeconds { get; private set; } [JsonIgnore] @@ -119,6 +142,7 @@ public DateTime? CancelledAt /// [JsonInclude] [JsonPropertyName("failed_at")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public int? FailedAtUnixTimeSeconds { get; private set; } [JsonIgnore] @@ -132,6 +156,7 @@ public DateTime? FailedAt /// [JsonInclude] [JsonPropertyName("completed_at")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public int? CompletedAtUnixTimeSeconds { get; private set; } [JsonIgnore] @@ -147,6 +172,7 @@ public DateTime? 
CompletedAt /// [JsonInclude] [JsonPropertyName("metadata")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public IReadOnlyDictionary Metadata { get; private set; } /// @@ -154,10 +180,89 @@ public DateTime? CompletedAt /// [JsonInclude] [JsonPropertyName("usage")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public Usage Usage { get; private set; } public static implicit operator string(RunStepResponse runStep) => runStep?.ToString(); public override string ToString() => Id; + + internal void AppendFrom(RunStepResponse other) + { + if (other == null) { return; } + + if (other.Delta != null) + { + if (other.Delta.StepDetails != null) + { + if (StepDetails == null) + { + StepDetails = new StepDetails(other.Delta.StepDetails); + } + else + { + StepDetails.AppendFrom(other.Delta.StepDetails); + } + } + + // don't update other fields if we are just appending Delta + return; + } + + if (other.CreatedAtUnixTimeSeconds.HasValue) + { + CreatedAtUnixTimeSeconds = other.CreatedAtUnixTimeSeconds; + } + + if (other.Type > 0) + { + Type = other.Type; + } + + if (other.Status > 0) + { + Status = other.Status; + } + + if (other.StepDetails != null) + { + StepDetails = other.StepDetails; + } + + if (other.LastError != null) + { + LastError = other.LastError; + } + + if (other.ExpiredAtUnixTimeSeconds.HasValue) + { + ExpiredAtUnixTimeSeconds = other.ExpiredAtUnixTimeSeconds; + } + + if (other.CancelledAtUnixTimeSeconds.HasValue) + { + CancelledAtUnixTimeSeconds = other.CancelledAtUnixTimeSeconds; + } + + if (other.FailedAtUnixTimeSeconds.HasValue) + { + FailedAtUnixTimeSeconds = other.FailedAtUnixTimeSeconds; + } + + if (other.CompletedAtUnixTimeSeconds.HasValue) + { + CompletedAtUnixTimeSeconds = other.CompletedAtUnixTimeSeconds; + } + + if (other.Metadata is { Count: > 0 }) + { + Metadata = new Dictionary(other.Metadata); + } + + if (other.Usage != null) + { + Usage = other.Usage; + } + } } -} \ No newline at end of file +} diff --git 
a/OpenAI-DotNet/Threads/RunStepType.cs b/OpenAI-DotNet/Threads/RunStepType.cs index 7c438e8d..85478906 100644 --- a/OpenAI-DotNet/Threads/RunStepType.cs +++ b/OpenAI-DotNet/Threads/RunStepType.cs @@ -11,4 +11,4 @@ public enum RunStepType [EnumMember(Value = "tool_calls")] ToolCalls } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/StepDetails.cs b/OpenAI-DotNet/Threads/StepDetails.cs index dd2f735c..1d3fd122 100644 --- a/OpenAI-DotNet/Threads/StepDetails.cs +++ b/OpenAI-DotNet/Threads/StepDetails.cs @@ -1,6 +1,8 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. +using OpenAI.Extensions; using System.Collections.Generic; +using System.Linq; using System.Text.Json.Serialization; namespace OpenAI.Threads @@ -10,19 +12,52 @@ namespace OpenAI.Threads /// public sealed class StepDetails { + public StepDetails() { } + + internal StepDetails(StepDetails other) => AppendFrom(other); + /// /// Details of the message creation by the run step. /// [JsonInclude] [JsonPropertyName("message_creation")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public RunStepMessageCreation MessageCreation { get; private set; } + private List toolCalls; + /// /// An array of tool calls the run step was involved in. /// These can be associated with one of three types of tools: 'code_interpreter', 'retrieval', or 'function'. 
/// [JsonInclude] [JsonPropertyName("tool_calls")] - public IReadOnlyList ToolCalls { get; private set; } + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public IReadOnlyList ToolCalls + { + get => toolCalls; + private set => toolCalls = value?.ToList(); + } + + internal void AppendFrom(StepDetails other) + { + if (other.MessageCreation != null) + { + if (MessageCreation == null) + { + MessageCreation = other.MessageCreation; + } + else + { + MessageCreation.AppendFrom(other.MessageCreation); + } + } + + if (other.ToolCalls != null) + { + toolCalls ??= new List(); + toolCalls.AppendFrom(other.ToolCalls); + } + } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/SubmitToolOutputs.cs b/OpenAI-DotNet/Threads/SubmitToolOutputs.cs index 810d3c63..14b324c9 100644 --- a/OpenAI-DotNet/Threads/SubmitToolOutputs.cs +++ b/OpenAI-DotNet/Threads/SubmitToolOutputs.cs @@ -14,4 +14,4 @@ public sealed class SubmitToolOutputs [JsonPropertyName("tool_calls")] public IReadOnlyList ToolCalls { get; private set; } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/SubmitToolOutputsRequest.cs b/OpenAI-DotNet/Threads/SubmitToolOutputsRequest.cs index 3f7432ce..854002f8 100644 --- a/OpenAI-DotNet/Threads/SubmitToolOutputsRequest.cs +++ b/OpenAI-DotNet/Threads/SubmitToolOutputsRequest.cs @@ -30,10 +30,17 @@ public SubmitToolOutputsRequest(IEnumerable toolOutputs) [JsonPropertyName("tool_outputs")] public IReadOnlyList ToolOutputs { get; } + /// + /// If true, returns a stream of events that happen during the Run as server-sent events, + /// terminating when the Run enters a terminal state with a data: [DONE] message. 
+ /// + [JsonPropertyName("stream")] + public bool Stream { get; internal set; } + public static implicit operator SubmitToolOutputsRequest(ToolOutput toolOutput) => new(toolOutput); public static implicit operator SubmitToolOutputsRequest(ToolOutput[] toolOutputs) => new(toolOutputs); public static implicit operator SubmitToolOutputsRequest(List toolOutputs) => new(toolOutputs); } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/TextContent.cs b/OpenAI-DotNet/Threads/TextContent.cs deleted file mode 100644 index b6402607..00000000 --- a/OpenAI-DotNet/Threads/TextContent.cs +++ /dev/null @@ -1,25 +0,0 @@ -// Licensed under the MIT License. See LICENSE in the project root for license information. - -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace OpenAI.Threads -{ - public sealed class TextContent - { - /// - /// The data that makes up the text. - /// - [JsonInclude] - [JsonPropertyName("value")] - public string Value { get; private set; } - - /// - /// Annotations - /// - [JsonInclude] - [JsonPropertyName("annotations")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public IReadOnlyList Annotations { get; private set; } - } -} \ No newline at end of file diff --git a/OpenAI-DotNet/Threads/ThreadExtensions.cs b/OpenAI-DotNet/Threads/ThreadExtensions.cs index 91c11968..e880cbe1 100644 --- a/OpenAI-DotNet/Threads/ThreadExtensions.cs +++ b/OpenAI-DotNet/Threads/ThreadExtensions.cs @@ -1,7 +1,9 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. +using OpenAI.Assistants; using System; using System.Collections.Generic; +using System.Linq; using System.Threading; using System.Threading.Tasks; @@ -35,10 +37,24 @@ public static async Task ModifyAsync(this ThreadResponse thread, /// Deletes the thread. /// /// . + /// Optional, should tool resources, such as vector stores be deleted when this thread is deleted? /// Optional, . 
/// True, if the thread was successfully deleted. - public static async Task DeleteAsync(this ThreadResponse thread, CancellationToken cancellationToken = default) - => await thread.Client.ThreadsEndpoint.DeleteThreadAsync(thread, cancellationToken).ConfigureAwait(false); + public static async Task DeleteAsync(this ThreadResponse thread, bool deleteToolResources = false, CancellationToken cancellationToken = default) + { + var deleteTasks = new List> { thread.Client.ThreadsEndpoint.DeleteThreadAsync(thread, cancellationToken) }; + + if (deleteToolResources && thread.ToolResources?.FileSearch?.VectorStoreIds is { Count: > 0 }) + { + deleteTasks.AddRange( + from vectorStoreId in thread.ToolResources?.FileSearch?.VectorStoreIds + where !string.IsNullOrWhiteSpace(vectorStoreId) + select thread.Client.VectorStoresEndpoint.DeleteVectorStoreAsync(vectorStoreId, cancellationToken)); + } + + await Task.WhenAll(deleteTasks).ConfigureAwait(false); + return deleteTasks.TrueForAll(task => task.Result); + } #region Messages @@ -46,10 +62,10 @@ public static async Task DeleteAsync(this ThreadResponse thread, Cancellat /// Create a new message for this thread. /// /// . - /// . + /// . /// Optional, . /// . - public static async Task CreateMessageAsync(this ThreadResponse thread, CreateMessageRequest request, CancellationToken cancellationToken = default) + public static async Task CreateMessageAsync(this ThreadResponse thread, Message request, CancellationToken cancellationToken = default) => await thread.Client.ThreadsEndpoint.CreateMessageAsync(thread.Id, request, cancellationToken).ConfigureAwait(false); /// @@ -58,9 +74,9 @@ public static async Task CreateMessageAsync(this ThreadResponse /// . /// Optional, . /// Optional, . - /// + /// . 
public static async Task> ListMessagesAsync(this ThreadResponse thread, ListQuery query = null, CancellationToken cancellationToken = default) - => await thread.Client.ThreadsEndpoint.ListMessagesAsync(thread.Id, query, cancellationToken).ConfigureAwait(false); + => await thread.Client.ThreadsEndpoint.ListMessagesAsync(thread.Id, query, null, cancellationToken).ConfigureAwait(false); /// /// Retrieve a message. @@ -116,7 +132,7 @@ public static async Task ModifyMessageAsync(this ThreadResponse #endregion Messages - #region Files + #region Files (Obsolete) /// /// Returns a list of message files. @@ -126,6 +142,7 @@ public static async Task ModifyMessageAsync(this ThreadResponse /// . /// Optional, . /// . + [Obsolete("MessageFiles removed from Threads. Files now belong to ToolResources.")] public static async Task> ListFilesAsync(this ThreadResponse thread, string messageId, ListQuery query = null, CancellationToken cancellationToken = default) => await thread.Client.ThreadsEndpoint.ListFilesAsync(thread.Id, messageId, query, cancellationToken).ConfigureAwait(false); @@ -136,6 +153,7 @@ public static async Task> ListFilesAsync(this /// . /// Optional, . /// . + [Obsolete("MessageFiles removed from Threads. Files now belong to ToolResources.")] public static async Task> ListFilesAsync(this MessageResponse message, ListQuery query = null, CancellationToken cancellationToken = default) => await message.Client.ThreadsEndpoint.ListFilesAsync(message.ThreadId, message.Id, query, cancellationToken).ConfigureAwait(false); @@ -146,35 +164,11 @@ public static async Task> ListFilesAsync(this /// The id of the file being retrieved. /// Optional, . /// . + [Obsolete("MessageFiles removed from Threads. 
Files now belong to ToolResources.")] public static async Task RetrieveFileAsync(this MessageResponse message, string fileId, CancellationToken cancellationToken = default) => await message.Client.ThreadsEndpoint.RetrieveFileAsync(message.ThreadId, message.Id, fileId, cancellationToken).ConfigureAwait(false); - // TODO 400 bad request errors. Likely OpenAI bug downloading message file content. - ///// - ///// Downloads a message file content to local disk. - ///// - ///// . - ///// The id of the file being retrieved. - ///// Directory to save the file content. - ///// Optional, delete cached file. Defaults to false. - ///// Optional, . - ///// Path to the downloaded file content. - //public static async Task DownloadFileContentAsync(this MessageResponse message, string fileId, string directory, bool deleteCachedFile = false, CancellationToken cancellationToken = default) - // => await message.Client.FilesEndpoint.DownloadFileAsync(fileId, directory, deleteCachedFile, cancellationToken).ConfigureAwait(false); - - // TODO 400 bad request errors. Likely OpenAI bug downloading message file content. - ///// - ///// Downloads a message file content to local disk. - ///// - ///// . - ///// Directory to save the file content. - ///// Optional, delete cached file. Defaults to false. - ///// Optional, . - ///// Path to the downloaded file content. - //public static async Task DownloadContentAsync(this MessageFileResponse file, string directory, bool deleteCachedFile = false, CancellationToken cancellationToken = default) - // => await file.Client.FilesEndpoint.DownloadFileAsync(file.Id, directory, deleteCachedFile, cancellationToken).ConfigureAwait(false); - - #endregion Files + #endregion Files (Obsolete) #region Runs @@ -183,20 +177,32 @@ public static async Task RetrieveFileAsync(this MessageResp /// /// . /// . + /// Optional, stream callback handler. /// Optional, . /// . 
- public static async Task CreateRunAsync(this ThreadResponse thread, CreateRunRequest request = null, CancellationToken cancellationToken = default) - => await thread.Client.ThreadsEndpoint.CreateRunAsync(thread, request, cancellationToken).ConfigureAwait(false); + public static async Task CreateRunAsync(this ThreadResponse thread, CreateRunRequest request = null, Action streamEventHandler = null, CancellationToken cancellationToken = default) + => await thread.Client.ThreadsEndpoint.CreateRunAsync(thread, request, streamEventHandler, cancellationToken).ConfigureAwait(false); /// /// Create a run. /// /// . - /// Id of the assistant to use for the run. + /// The to use for the run. + /// Optional, stream callback handler. /// Optional, . /// . - public static async Task CreateRunAsync(this ThreadResponse thread, string assistantId, CancellationToken cancellationToken = default) - => await thread.Client.ThreadsEndpoint.CreateRunAsync(thread, new CreateRunRequest(assistantId), cancellationToken).ConfigureAwait(false); + public static async Task CreateRunAsync(this ThreadResponse thread, AssistantResponse assistant, Action streamEventHandler = null, CancellationToken cancellationToken = default) + { + var request = new CreateRunRequest( + assistant, + model: assistant.Model, + instructions: assistant.Instructions, + tools: assistant.Tools, + temperature: assistant.Temperature, + topP: assistant.TopP, + responseFormat: assistant.ResponseFormat); + return await thread.Client.ThreadsEndpoint.CreateRunAsync(thread, request, streamEventHandler, cancellationToken).ConfigureAwait(false); + } /// /// Gets the thread associated to the . @@ -208,12 +214,12 @@ public static async Task GetThreadAsync(this RunResponse run, Ca => await run.Client.ThreadsEndpoint.RetrieveThreadAsync(run.ThreadId, cancellationToken).ConfigureAwait(false); /// - /// List all of the runs associated to a thread. + /// Lists all the runs associated to a thread. /// /// . /// . /// Optional, . 
- /// + /// . public static async Task> ListRunsAsync(this ThreadResponse thread, ListQuery query = null, CancellationToken cancellationToken = default) => await thread.Client.ThreadsEndpoint.ListRunsAsync(thread.Id, query, cancellationToken).ConfigureAwait(false); @@ -261,16 +267,16 @@ public static async Task ModifyAsync(this RunResponse run, IReadOnl /// . public static async Task WaitForStatusChangeAsync(this RunResponse run, int? pollingInterval = null, int? timeout = null, CancellationToken cancellationToken = default) { - using CancellationTokenSource cts = timeout.HasValue && timeout < 0 + using CancellationTokenSource cts = timeout is < 0 ? new CancellationTokenSource() : new CancellationTokenSource(TimeSpan.FromSeconds(timeout ?? 30)); using var chainedCts = CancellationTokenSource.CreateLinkedTokenSource(cts.Token, cancellationToken); RunResponse result; do { - await Task.Delay(pollingInterval ?? 500, chainedCts.Token); + await Task.Delay(pollingInterval ?? 500, chainedCts.Token).ConfigureAwait(false); cancellationToken.ThrowIfCancellationRequested(); - result = await run.UpdateAsync(cancellationToken: chainedCts.Token); + result = await run.UpdateAsync(cancellationToken: chainedCts.Token).ConfigureAwait(false); } while (result.Status is RunStatus.Queued or RunStatus.InProgress or RunStatus.Cancelling); return result; } @@ -282,10 +288,11 @@ public static async Task WaitForStatusChangeAsync(this RunResponse /// /// to submit outputs for. /// . + /// Optional, stream callback handler. /// Optional, . /// . 
- public static async Task SubmitToolOutputsAsync(this RunResponse run, SubmitToolOutputsRequest request, CancellationToken cancellationToken = default) - => await run.Client.ThreadsEndpoint.SubmitToolOutputsAsync(run.ThreadId, run.Id, request, cancellationToken).ConfigureAwait(false); + public static async Task SubmitToolOutputsAsync(this RunResponse run, SubmitToolOutputsRequest request, Action streamEventHandler = null, CancellationToken cancellationToken = default) + => await run.Client.ThreadsEndpoint.SubmitToolOutputsAsync(run.ThreadId, run.Id, request, streamEventHandler, cancellationToken).ConfigureAwait(false); /// /// When a run has the status: "requires_action" and required_action.type is submit_tool_outputs, @@ -294,10 +301,11 @@ public static async Task SubmitToolOutputsAsync(this RunResponse ru /// /// to submit outputs for. /// s + /// Optional, stream callback handler. /// Optional, . /// . - public static async Task SubmitToolOutputsAsync(this RunResponse run, IEnumerable outputs, CancellationToken cancellationToken = default) - => await run.SubmitToolOutputsAsync(new SubmitToolOutputsRequest(outputs), cancellationToken).ConfigureAwait(false); + public static async Task SubmitToolOutputsAsync(this RunResponse run, IEnumerable outputs, Action streamEventHandler = null, CancellationToken cancellationToken = default) + => await run.SubmitToolOutputsAsync(new SubmitToolOutputsRequest(outputs), streamEventHandler, cancellationToken).ConfigureAwait(false); /// /// Returns a list of run steps belonging to a run. @@ -334,7 +342,7 @@ public static async Task UpdateAsync(this RunStepResponse runSt /// to cancel. /// Optional, . /// . 
- public static async Task CancelAsync(this RunResponse run, CancellationToken cancellationToken = default) + public static async Task CancelAsync(this RunResponse run, CancellationToken cancellationToken = default) => await run.Client.ThreadsEndpoint.CancelRunAsync(run.ThreadId, run.Id, cancellationToken).ConfigureAwait(false); /// @@ -345,8 +353,8 @@ public static async Task CancelAsync(this RunResponse run, Cancella /// Optional, . /// . public static async Task> ListMessagesAsync(this RunResponse run, ListQuery query = null, CancellationToken cancellationToken = default) - => await run.Client.ThreadsEndpoint.ListMessagesAsync(run.ThreadId, query, cancellationToken).ConfigureAwait(false); + => await run.Client.ThreadsEndpoint.ListMessagesAsync(run.ThreadId, query, run.Id, cancellationToken).ConfigureAwait(false); #endregion Runs } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/ThreadResponse.cs b/OpenAI-DotNet/Threads/ThreadResponse.cs index cd458cd1..fdfe6d03 100644 --- a/OpenAI-DotNet/Threads/ThreadResponse.cs +++ b/OpenAI-DotNet/Threads/ThreadResponse.cs @@ -10,7 +10,7 @@ namespace OpenAI.Threads /// A conversation session between an Assistant and a user. /// Threads store Messages and automatically handle truncation to fit content into a model's context. /// - public sealed class ThreadResponse : BaseResponse + public sealed class ThreadResponse : BaseResponse, IServerSentEvent { /// /// The identifier, which can be referenced in API endpoints. @@ -36,6 +36,17 @@ public sealed class ThreadResponse : BaseResponse [JsonIgnore] public DateTime CreatedAt => DateTimeOffset.FromUnixTimeSeconds(CreatedAtUnixTimeSeconds).DateTime; + /// + /// A set of resources that are made available to the assistant's tools in this thread. + /// The resources are specific to the type of tool. + /// For example, the code_interpreter tool requires a list of file IDs, + /// while the file_search tool requires a list of vector store IDs. 
+ /// + [JsonInclude] + [JsonPropertyName("tool_resources")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ToolResources ToolResources { get; private set; } + /// /// Set of 16 key-value pairs that can be attached to an object. /// This can be useful for storing additional information about the object in a structured format. @@ -43,10 +54,11 @@ public sealed class ThreadResponse : BaseResponse /// [JsonInclude] [JsonPropertyName("metadata")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] public IReadOnlyDictionary Metadata { get; private set; } public static implicit operator string(ThreadResponse thread) => thread?.ToString(); public override string ToString() => Id; } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/ThreadsEndpoint.cs b/OpenAI-DotNet/Threads/ThreadsEndpoint.cs index 14c5ccc0..ab2e2177 100644 --- a/OpenAI-DotNet/Threads/ThreadsEndpoint.cs +++ b/OpenAI-DotNet/Threads/ThreadsEndpoint.cs @@ -1,7 +1,9 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. using OpenAI.Extensions; +using System; using System.Collections.Generic; +using System.Net.Http; using System.Text.Json; using System.Threading; using System.Threading.Tasks; @@ -26,9 +28,9 @@ public ThreadsEndpoint(OpenAIClient client) : base(client) { } /// . public async Task CreateThreadAsync(CreateThreadRequest request = null, CancellationToken cancellationToken = default) { - using var jsonContent = request != null ? JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent() : null; - using var response = await client.Client.PostAsync(GetUrl(), jsonContent, cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, jsonContent, null, cancellationToken).ConfigureAwait(false); + using var payload = request != null ? 
JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent() : null; + using var response = await client.Client.PostAsync(GetUrl(), payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); return response.Deserialize(responseAsString, client); } @@ -60,9 +62,9 @@ public async Task RetrieveThreadAsync(string threadId, Cancellat /// . public async Task ModifyThreadAsync(string threadId, IReadOnlyDictionary metadata, CancellationToken cancellationToken = default) { - using var jsonContent = JsonSerializer.Serialize(new { metadata }, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); - using var response = await client.Client.PostAsync(GetUrl($"/{threadId}"), jsonContent, cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, jsonContent, null, cancellationToken).ConfigureAwait(false); + using var payload = JsonSerializer.Serialize(new { metadata }, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await client.Client.PostAsync(GetUrl($"/{threadId}"), payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); return response.Deserialize(responseAsString, client); } @@ -76,7 +78,7 @@ public async Task DeleteThreadAsync(string threadId, CancellationToken can { using var response = await client.Client.DeleteAsync(GetUrl($"/{threadId}"), cancellationToken).ConfigureAwait(false); var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken: cancellationToken).ConfigureAwait(false); - return JsonSerializer.Deserialize(responseAsString, OpenAIClient.JsonSerializationOptions)?.Deleted ?? false; + return response.Deserialize(responseAsString, client)?.Deleted ?? 
false; } #region Messages @@ -85,14 +87,14 @@ public async Task DeleteThreadAsync(string threadId, CancellationToken can /// Create a message. /// /// The id of the thread to create a message for. - /// + /// . /// Optional, . /// . - public async Task CreateMessageAsync(string threadId, CreateMessageRequest request, CancellationToken cancellationToken = default) + public async Task CreateMessageAsync(string threadId, Message message, CancellationToken cancellationToken = default) { - using var jsonContent = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); - using var response = await client.Client.PostAsync(GetUrl($"/{threadId}/messages"), jsonContent, cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, jsonContent, null, cancellationToken).ConfigureAwait(false); + using var payload = JsonSerializer.Serialize(message, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await client.Client.PostAsync(GetUrl($"/{threadId}/messages"), payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); return response.Deserialize(responseAsString, client); } @@ -101,11 +103,20 @@ public async Task CreateMessageAsync(string threadId, CreateMes /// /// The id of the thread the messages belong to. /// . + /// Optional, filter messages by the run ID that generated them. /// Optional, . /// . 
- public async Task> ListMessagesAsync(string threadId, ListQuery query = null, CancellationToken cancellationToken = default) + public async Task> ListMessagesAsync(string threadId, ListQuery query = null, string runId = null, CancellationToken cancellationToken = default) { - using var response = await client.Client.GetAsync(GetUrl($"/{threadId}/messages", query), cancellationToken).ConfigureAwait(false); + Dictionary queryParams = query; + + if (!string.IsNullOrWhiteSpace(runId)) + { + queryParams ??= new(); + queryParams.Add("run_id", runId); + } + + using var response = await client.Client.GetAsync(GetUrl($"/{threadId}/messages", queryParams), cancellationToken).ConfigureAwait(false); var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken: cancellationToken).ConfigureAwait(false); return response.Deserialize>(responseAsString, client); } @@ -156,48 +167,14 @@ public async Task ModifyMessageAsync(MessageResponse message, I /// . public async Task ModifyMessageAsync(string threadId, string messageId, IReadOnlyDictionary metadata, CancellationToken cancellationToken = default) { - using var jsonContent = JsonSerializer.Serialize(new { metadata }, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); - using var response = await client.Client.PostAsync(GetUrl($"/{threadId}/messages/{messageId}"), jsonContent, cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, jsonContent, null, cancellationToken).ConfigureAwait(false); + using var payload = JsonSerializer.Serialize(new { metadata }, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await client.Client.PostAsync(GetUrl($"/{threadId}/messages/{messageId}"), payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); return response.Deserialize(responseAsString, client); 
} #endregion Messages - #region Files - - /// - /// Returns a list of message files. - /// - /// The id of the thread that the message and files belong to. - /// The id of the message that the files belongs to. - /// . - /// Optional, . - /// . - public async Task> ListFilesAsync(string threadId, string messageId, ListQuery query = null, CancellationToken cancellationToken = default) - { - using var response = await client.Client.GetAsync(GetUrl($"/{threadId}/messages/{messageId}/files", query), cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken: cancellationToken).ConfigureAwait(false); - return response.Deserialize>(responseAsString, client); - } - - /// - /// Retrieve message file. - /// - /// The id of the thread to which the message and file belong. - /// The id of the message the file belongs to. - /// The id of the file being retrieved. - /// Optional, . - /// . - public async Task RetrieveFileAsync(string threadId, string messageId, string fileId, CancellationToken cancellationToken = default) - { - using var response = await client.Client.GetAsync(GetUrl($"/{threadId}/messages/{messageId}/files/{fileId}"), cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken: cancellationToken).ConfigureAwait(false); - return response.Deserialize(responseAsString, client); - } - - #endregion Files - #region Runs /// @@ -218,10 +195,11 @@ public async Task> ListRunsAsync(string threadId, List /// Create a run. /// /// The id of the thread to run. - /// + /// . + /// Optional, stream callback handler. /// Optional, . /// . 
- public async Task CreateRunAsync(string threadId, CreateRunRequest request = null, CancellationToken cancellationToken = default) + public async Task CreateRunAsync(string threadId, CreateRunRequest request = null, Action streamEventHandler = null, CancellationToken cancellationToken = default) { if (request == null || string.IsNullOrWhiteSpace(request.AssistantId)) { @@ -229,9 +207,17 @@ public async Task CreateRunAsync(string threadId, CreateRunRequest request = new CreateRunRequest(assistant, request); } - using var jsonContent = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); - using var response = await client.Client.PostAsync(GetUrl($"/{threadId}/runs"), jsonContent, cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, jsonContent, null, cancellationToken).ConfigureAwait(false); + request.Stream = streamEventHandler != null; + var endpoint = GetUrl($"/{threadId}/runs"); + using var payload = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + + if (request.Stream) + { + return await StreamRunAsync(endpoint, payload, streamEventHandler, cancellationToken); + } + + using var response = await client.Client.PostAsync(endpoint, payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); return response.Deserialize(responseAsString, client); } @@ -239,9 +225,10 @@ public async Task CreateRunAsync(string threadId, CreateRunRequest /// Create a thread and run it in one request. /// /// . + /// Optional, stream callback handler. /// Optional, . /// . 
- public async Task CreateThreadAndRunAsync(CreateThreadAndRunRequest request = null, CancellationToken cancellationToken = default) + public async Task CreateThreadAndRunAsync(CreateThreadAndRunRequest request = null, Action streamEventHandler = null, CancellationToken cancellationToken = default) { if (request == null || string.IsNullOrWhiteSpace(request.AssistantId)) { @@ -249,9 +236,17 @@ public async Task CreateThreadAndRunAsync(CreateThreadAndRunRequest request = new CreateThreadAndRunRequest(assistant, request); } - using var jsonContent = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); - using var response = await client.Client.PostAsync(GetUrl("/runs"), jsonContent, cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, jsonContent, null, cancellationToken).ConfigureAwait(false); + request.Stream = streamEventHandler != null; + var endpoint = GetUrl("/runs"); + using var payload = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + + if (request.Stream) + { + return await StreamRunAsync(endpoint, payload, streamEventHandler, cancellationToken); + } + + using var response = await client.Client.PostAsync(endpoint, payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); return response.Deserialize(responseAsString, client); } @@ -284,9 +279,9 @@ public async Task RetrieveRunAsync(string threadId, string runId, C /// . 
public async Task ModifyRunAsync(string threadId, string runId, IReadOnlyDictionary metadata, CancellationToken cancellationToken = default) { - using var jsonContent = JsonSerializer.Serialize(new { metadata }, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); - using var response = await client.Client.PostAsync(GetUrl($"/{threadId}/runs/{runId}"), jsonContent, cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, jsonContent, null, cancellationToken).ConfigureAwait(false); + using var payload = JsonSerializer.Serialize(new { metadata }, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await client.Client.PostAsync(GetUrl($"/{threadId}/runs/{runId}"), payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); return response.Deserialize(responseAsString, client); } @@ -298,13 +293,22 @@ public async Task ModifyRunAsync(string threadId, string runId, IRe /// The id of the thread to which this run belongs. /// The id of the run that requires the tool output submission. /// . + /// Optional, stream callback handler. /// Optional, . /// . 
- public async Task SubmitToolOutputsAsync(string threadId, string runId, SubmitToolOutputsRequest request, CancellationToken cancellationToken = default) + public async Task SubmitToolOutputsAsync(string threadId, string runId, SubmitToolOutputsRequest request, Action streamEventHandler = null, CancellationToken cancellationToken = default) { - using var jsonContent = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); - using var response = await client.Client.PostAsync(GetUrl($"/{threadId}/runs/{runId}/submit_tool_outputs"), jsonContent, cancellationToken).ConfigureAwait(false); - var responseAsString = await response.ReadAsStringAsync(EnableDebug, jsonContent, null, cancellationToken).ConfigureAwait(false); + request.Stream = streamEventHandler != null; + using var payload = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + var endpoint = GetUrl($"/{threadId}/runs/{runId}/submit_tool_outputs"); + + if (request.Stream) + { + return await StreamRunAsync(endpoint, payload, streamEventHandler, cancellationToken); + } + + using var response = await client.Client.PostAsync(endpoint, payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); return response.Deserialize(responseAsString, client); } @@ -345,13 +349,155 @@ public async Task RetrieveRunStepAsync(string threadId, string /// The id of the run to cancel. /// Optional, . /// . 
- public async Task CancelRunAsync(string threadId, string runId, CancellationToken cancellationToken = default) + public async Task CancelRunAsync(string threadId, string runId, CancellationToken cancellationToken = default) { - using var response = await client.Client.PostAsync(GetUrl($"/{threadId}/runs/{runId}/cancel"), content: null, cancellationToken).ConfigureAwait(false); + using var response = await client.Client.PostAsync(GetUrl($"/{threadId}/runs/{runId}/cancel"), null!, cancellationToken).ConfigureAwait(false); var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken: cancellationToken).ConfigureAwait(false); - return response.Deserialize(responseAsString, client); + var run = response.Deserialize(responseAsString, client); + + if (run.Status < RunStatus.Cancelling) + { + try + { + run = await run.WaitForStatusChangeAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + } + catch (Exception) + { + // ignored + } + } + + return run.Status >= RunStatus.Cancelling; } #endregion Runs + + #region Files (Obsolete) + + /// + /// Returns a list of message files. + /// + /// The id of the thread that the message and files belong to. + /// The id of the message that the files belongs to. + /// . + /// Optional, . + /// . + [Obsolete("Files removed from Assistants. Files now belong to ToolResources.")] + public async Task> ListFilesAsync(string threadId, string messageId, ListQuery query = null, CancellationToken cancellationToken = default) + { + using var response = await client.Client.GetAsync(GetUrl($"/{threadId}/messages/{messageId}/files", query), cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken: cancellationToken).ConfigureAwait(false); + return response.Deserialize>(responseAsString, client); + } + + /// + /// Retrieve message file. + /// + /// The id of the thread to which the message and file belong. 
+ /// The id of the message the file belongs to. + /// The id of the file being retrieved. + /// Optional, . + /// . + [Obsolete("Files removed from Assistants. Files now belong to ToolResources.")] + public async Task RetrieveFileAsync(string threadId, string messageId, string fileId, CancellationToken cancellationToken = default) + { + using var response = await client.Client.GetAsync(GetUrl($"/{threadId}/messages/{messageId}/files/{fileId}"), cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken: cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client); + } + + #endregion Files (Obsolete) + + private async Task StreamRunAsync(string endpoint, StringContent payload, Action streamEventHandler, CancellationToken cancellationToken = default) + { + RunResponse run = null; + RunStepResponse runStep = null; + MessageResponse message = null; + using var response = await this.StreamEventsAsync(endpoint, payload, (sseResponse, ssEvent) => + { + try + { + switch (ssEvent.Value.GetValue()) + { + case "thread.created": + streamEventHandler?.Invoke(sseResponse.Deserialize(ssEvent, client)); + return; + case "thread.run.created": + case "thread.run.queued": + case "thread.run.in_progress": + case "thread.run.requires_action": + case "thread.run.completed": + case "thread.run.incomplete": + case "thread.run.failed": + case "thread.run.cancelling": + case "thread.run.cancelled": + case "thread.run.expired": + var partialRun = sseResponse.Deserialize(ssEvent, client); + if (run == null) + { + run = partialRun; + } + else + { + run.AppendFrom(partialRun); + } + + streamEventHandler?.Invoke(run); + return; + case "thread.run.step.created": + case "thread.run.step.in_progress": + case "thread.run.step.delta": + case "thread.run.step.completed": + case "thread.run.step.failed": + case "thread.run.step.cancelled": + case "thread.run.step.expired": + var partialRunStep 
= sseResponse.Deserialize(ssEvent, client); + if (runStep == null) + { + runStep = partialRunStep; + } + else + { + runStep.AppendFrom(partialRunStep); + } + + streamEventHandler?.Invoke(runStep); + return; + case "thread.message.created": + case "thread.message.in_progress": + case "thread.message.delta": + case "thread.message.completed": + case "thread.message.incomplete": + var partialMessage = sseResponse.Deserialize(ssEvent, client); + if (message == null) + { + message = partialMessage; + } + else + { + message.AppendFrom(partialMessage); + } + + streamEventHandler?.Invoke(message); + return; + case "error": + streamEventHandler?.Invoke(sseResponse.Deserialize(ssEvent, client)); + return; + default: + // if not properly handled raise it up to caller to deal with it themselves. + streamEventHandler.Invoke(ssEvent); + return; + } + } + catch (Exception e) + { + Console.WriteLine(e); + } + }, cancellationToken); + + if (run == null) { return null; } + run.SetResponseData(response.Headers, client); + return run; + } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/ToolCall.cs b/OpenAI-DotNet/Threads/ToolCall.cs index df965248..7f0a11eb 100644 --- a/OpenAI-DotNet/Threads/ToolCall.cs +++ b/OpenAI-DotNet/Threads/ToolCall.cs @@ -1,11 +1,20 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. +using OpenAI.Extensions; +using System; +using System.Collections.Generic; using System.Text.Json.Serialization; namespace OpenAI.Threads { - public sealed class ToolCall + public sealed class ToolCall : IAppendable { + public ToolCall() { } + + [JsonInclude] + [JsonPropertyName("index")] + public int? Index { get; private set; } + /// /// The ID of the tool call. /// This ID must be referenced when you submit the tool outputs in using the Submit tool outputs to run endpoint. 
@@ -36,10 +45,62 @@ public sealed class ToolCall public CodeInterpreter CodeInterpreter { get; private set; } /// - /// For now, this is always going to be an empty object. + /// The File Search tool call definition. /// + /// + /// For now, this is always going to be an empty object. + /// [JsonInclude] - [JsonPropertyName("retrieval")] + [JsonPropertyName("file_search")] + public IReadOnlyDictionary FileSearch { get; private set; } + + /// + /// For now, this is always going to be an empty object. + /// + [JsonIgnore] + [Obsolete("Removed")] public object Retrieval { get; private set; } + + public void AppendFrom(ToolCall other) + { + if (other == null) + { + return; + } + + if (!string.IsNullOrWhiteSpace(other.Id)) + { + Id = other.Id; + } + + if (other.FunctionCall != null) + { + if (FunctionCall == null) + { + FunctionCall = other.FunctionCall; + } + else + { + FunctionCall.AppendFrom(other.FunctionCall); + } + } + + if (other.CodeInterpreter != null) + { + if (CodeInterpreter == null) + { + CodeInterpreter = other.CodeInterpreter; + } + else + { + CodeInterpreter.AppendFrom(other.CodeInterpreter); + } + } + + if (other.FileSearch != null) + { + FileSearch = other.FileSearch; + } + } } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/ToolOutput.cs b/OpenAI-DotNet/Threads/ToolOutput.cs index 76eb45f9..d82cb13c 100644 --- a/OpenAI-DotNet/Threads/ToolOutput.cs +++ b/OpenAI-DotNet/Threads/ToolOutput.cs @@ -36,5 +36,7 @@ public ToolOutput(string toolCallId, string output) /// [JsonPropertyName("output")] public string Output { get; } + + public override string ToString() => Output; } -} \ No newline at end of file +} diff --git a/OpenAI-DotNet/Threads/TruncationStrategies.cs b/OpenAI-DotNet/Threads/TruncationStrategies.cs new file mode 100644 index 00000000..60bab462 --- /dev/null +++ b/OpenAI-DotNet/Threads/TruncationStrategies.cs @@ -0,0 +1,14 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. 
+ +using System.Runtime.Serialization; + +namespace OpenAI.Threads +{ + public enum TruncationStrategies + { + [EnumMember(Value = "auto")] + Auto = 0, + [EnumMember(Value = "last_messages")] + LastMessages + } +} diff --git a/OpenAI-DotNet/Threads/TruncationStrategy.cs b/OpenAI-DotNet/Threads/TruncationStrategy.cs new file mode 100644 index 00000000..2241e526 --- /dev/null +++ b/OpenAI-DotNet/Threads/TruncationStrategy.cs @@ -0,0 +1,27 @@ +using OpenAI.Extensions; +using System.Text.Json.Serialization; + +namespace OpenAI.Threads +{ + public sealed class TruncationStrategy + { + /// + /// The truncation strategy to use for the thread. + /// The default is 'auto'. If set to 'last_messages', + /// the thread will be truncated to the n most recent messages in the thread. When set to 'auto', + /// messages in the middle of the thread will be dropped to fit the context length of the model, 'max_prompt_tokens'. + /// + [JsonInclude] + [JsonPropertyName("type")] + [JsonConverter(typeof(JsonStringEnumConverter))] + public TruncationStrategies Type { get; private set; } + + /// + /// The number of most recent messages from the thread when constructing the context for the run. + /// + [JsonInclude] + [JsonPropertyName("last_messages")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public int? LastMessages { get; private set; } + } +} diff --git a/OpenAI-DotNet/VectorStores/ChunkingStrategy.cs b/OpenAI-DotNet/VectorStores/ChunkingStrategy.cs new file mode 100644 index 00000000..eef15bfe --- /dev/null +++ b/OpenAI-DotNet/VectorStores/ChunkingStrategy.cs @@ -0,0 +1,34 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. 
+ +using OpenAI.Extensions; +using System.Text.Json.Serialization; + +namespace OpenAI.VectorStores +{ + public sealed class ChunkingStrategy + { + public ChunkingStrategy() { } + + public ChunkingStrategy(ChunkingStrategyType type) + { + Type = type; + + switch (Type) + { + case ChunkingStrategyType.Static: + Static = new ChunkingStrategyStatic(); + break; + } + } + + [JsonInclude] + [JsonPropertyName("type")] + [JsonConverter(typeof(JsonStringEnumConverter))] + public ChunkingStrategyType Type { get; private set; } + + [JsonInclude] + [JsonPropertyName("static")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ChunkingStrategyStatic Static { get; private set; } + } +} diff --git a/OpenAI-DotNet/VectorStores/ChunkingStrategyStatic.cs b/OpenAI-DotNet/VectorStores/ChunkingStrategyStatic.cs new file mode 100644 index 00000000..4ce0f5b6 --- /dev/null +++ b/OpenAI-DotNet/VectorStores/ChunkingStrategyStatic.cs @@ -0,0 +1,45 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System.Text.Json.Serialization; + +namespace OpenAI.VectorStores +{ + public sealed class ChunkingStrategyStatic + { + /// + /// Constructor. + /// + /// + /// The maximum number of tokens in each chunk. + /// The default value is 800. + /// The minimum value is 100 and the maximum value is 4096. + /// + /// + /// The number of tokens that overlap between chunks. + /// The default value is 400. + /// Note that the overlap must not exceed half of max_chunk_size_tokens. + /// + [JsonConstructor] + public ChunkingStrategyStatic(int? maxChunkSizeTokens = null, int? chunkOverlapTokens = null) + { + MaxChunkSizeTokens = maxChunkSizeTokens ?? 800; + ChunkOverlapTokens = chunkOverlapTokens ?? 400; + } + + /// + /// The maximum number of tokens in each chunk. + /// The default value is 800. + /// The minimum value is 100 and the maximum value is 4096. + /// + [JsonPropertyName("max_chunk_size_tokens")] + public int? 
MaxChunkSizeTokens { get; } + + /// + /// The number of tokens that overlap between chunks. + /// The default value is 400. + /// Note that the overlap must not exceed half of max_chunk_size_tokens. + /// + [JsonPropertyName("chunk_overlap_tokens")] + public int? ChunkOverlapTokens { get; } + } +} diff --git a/OpenAI-DotNet/VectorStores/ChunkingStrategyType.cs b/OpenAI-DotNet/VectorStores/ChunkingStrategyType.cs new file mode 100644 index 00000000..e3d2a35b --- /dev/null +++ b/OpenAI-DotNet/VectorStores/ChunkingStrategyType.cs @@ -0,0 +1,24 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System.Runtime.Serialization; + +namespace OpenAI.VectorStores +{ + public enum ChunkingStrategyType + { + /// + /// The default strategy. + /// This strategy currently uses a 'max_chunk_size_tokens' of '800' and 'chunk_overlap_tokens' of '400'. + /// + [EnumMember(Value = "auto")] + Auto, + /// + /// This is returned when the chunking strategy is unknown. + /// Typically, this is because the file was indexed before the 'chunking_strategy' concept was introduced in the API. + /// + [EnumMember(Value = "other")] + Other, + [EnumMember(Value = "static")] + Static + } +} diff --git a/OpenAI-DotNet/VectorStores/CreateVectorStoreRequest.cs b/OpenAI-DotNet/VectorStores/CreateVectorStoreRequest.cs new file mode 100644 index 00000000..70466e1b --- /dev/null +++ b/OpenAI-DotNet/VectorStores/CreateVectorStoreRequest.cs @@ -0,0 +1,81 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using OpenAI.Files; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json.Serialization; + +namespace OpenAI.VectorStores +{ + public sealed class CreateVectorStoreRequest + { + /// + /// Constructor. + /// + /// + /// Custom name for the vector store. + /// + /// + /// A list of file IDs to add to the vector store. + /// There can be a maximum of 10000 files in a vector store. 
+ /// + /// + /// + /// The chunking strategy used to chunk the file(s). If not set, will use the auto strategy. Only applicable if file_ids is non-empty. + /// + /// + /// Optional, set of 16 key-value pairs that can be attached to a vector store. + /// This can be useful for storing additional information about the vector store in a structured format. + /// Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. + /// + public CreateVectorStoreRequest(string name = null, IReadOnlyList fileIds = null, int? expiresAfter = null, ChunkingStrategy chunkingStrategy = null, IReadOnlyDictionary metadata = null) + { + Name = name; + FileIds = fileIds; + ExpiresAfter = expiresAfter.HasValue ? new ExpirationPolicy(expiresAfter.Value) : null; + ChunkingStrategy = chunkingStrategy; + Metadata = metadata; + } + + /// + public CreateVectorStoreRequest(string name, IReadOnlyList files, int? expiresAfter = null, ChunkingStrategy chunkingStrategy = null, IReadOnlyDictionary metadata = null) + : this(name, files?.Select(file => file.Id).ToList(), expiresAfter, chunkingStrategy, metadata) + { + } + + /// + /// Custom name for the vector store. + /// + [JsonPropertyName("name")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public string Name { get; } + + /// + /// A list of file IDs to add to the vector store. + /// There can be a maximum of 10000 files in a vector store. + /// + [JsonPropertyName("file_ids")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public IReadOnlyList FileIds { get; } + + /// + /// The expiration policy for a vector store. 
+ /// + [JsonPropertyName("expires_after")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ExpirationPolicy ExpiresAfter { get; } + + [JsonPropertyName("chunking_strategy")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ChunkingStrategy ChunkingStrategy { get; } + + /// + /// Set of 16 key-value pairs that can be attached to a vector store. + /// This can be useful for storing additional information about the vector store in a structured format. + /// Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. + /// + [JsonPropertyName("metadata")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public IReadOnlyDictionary Metadata { get; } + } +} diff --git a/OpenAI-DotNet/VectorStores/ExpirationPolicy.cs b/OpenAI-DotNet/VectorStores/ExpirationPolicy.cs new file mode 100644 index 00000000..33b42041 --- /dev/null +++ b/OpenAI-DotNet/VectorStores/ExpirationPolicy.cs @@ -0,0 +1,40 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System.Text.Json.Serialization; + +namespace OpenAI.VectorStores +{ + /// + /// The expiration policy for a vector store. + /// + public sealed class ExpirationPolicy + { + public static implicit operator ExpirationPolicy(int days) => new(days); + + /// + /// Constructor. + /// + /// + /// The number of days after the anchor time that the vector store will expire. + /// + public ExpirationPolicy(int days) + { + Days = days; + } + + /// + /// Anchor timestamp after which the expiration policy applies. + /// Supported anchors: 'last_active_at'. + /// + [JsonInclude] + [JsonPropertyName("anchor")] + public string Anchor { get; private set; } = "last_active_at"; + + /// + /// The number of days after the anchor time that the vector store will expire. 
+ /// + [JsonInclude] + [JsonPropertyName("days")] + public int Days { get; private set; } + } +} diff --git a/OpenAI-DotNet/VectorStores/FileCounts.cs b/OpenAI-DotNet/VectorStores/FileCounts.cs new file mode 100644 index 00000000..9ea00c50 --- /dev/null +++ b/OpenAI-DotNet/VectorStores/FileCounts.cs @@ -0,0 +1,44 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System.Text.Json.Serialization; + +namespace OpenAI.VectorStores +{ + public sealed class FileCounts + { + /// + /// The number of files that are currently being processed. + /// + [JsonInclude] + [JsonPropertyName("in_progress")] + public int InProgress { get; private set; } + + /// + /// The number of files that have been successfully processed. + /// + [JsonInclude] + [JsonPropertyName("completed")] + public int Completed { get; private set; } + + /// + /// The number of files that have failed to process. + /// + [JsonInclude] + [JsonPropertyName("failed")] + public int Failed { get; private set; } + + /// + /// The number of files that were cancelled. + /// + [JsonInclude] + [JsonPropertyName("cancelled")] + public int Cancelled { get; private set; } + + /// + /// The total number of files. + /// + [JsonInclude] + [JsonPropertyName("total")] + public int Total { get; private set; } + } +} diff --git a/OpenAI-DotNet/VectorStores/VectorStoreExtensions.cs b/OpenAI-DotNet/VectorStores/VectorStoreExtensions.cs new file mode 100644 index 00000000..421e79d4 --- /dev/null +++ b/OpenAI-DotNet/VectorStores/VectorStoreExtensions.cs @@ -0,0 +1,44 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace OpenAI.VectorStores +{ + public static class VectorStoreExtensions + { + /// + /// Get the latest status of the . + /// + /// . + /// Optional, . + /// . 
+ public static async Task UpdateAsync(this VectorStoreFileBatchResponse vectorStoreFileBatchResponse, CancellationToken cancellationToken = default) + => await vectorStoreFileBatchResponse.Client.VectorStoresEndpoint.GetVectorStoreFileBatchAsync(vectorStoreFileBatchResponse.VectorStoreId, vectorStoreFileBatchResponse.Id, cancellationToken).ConfigureAwait(false); + + /// + /// Waits for to change. + /// + /// . + /// Optional, time in milliseconds to wait before polling status. + /// Optional, timeout in seconds to cancel polling.
Defaults to 30 seconds.
Set to -1 for indefinite. + /// Optional, . + /// . + public static async Task WaitForStatusChangeAsync(this VectorStoreFileBatchResponse vectorStoreFileBatchResponse, int? pollingInterval = null, int? timeout = null, CancellationToken cancellationToken = default) + { + using CancellationTokenSource cts = timeout is < 0 + ? new CancellationTokenSource() + : new CancellationTokenSource(TimeSpan.FromSeconds(timeout ?? 30)); + using var chainedCts = CancellationTokenSource.CreateLinkedTokenSource(cts.Token, cancellationToken); + VectorStoreFileBatchResponse result; + do + { + await Task.Delay(pollingInterval ?? 500, chainedCts.Token).ConfigureAwait(false); + cancellationToken.ThrowIfCancellationRequested(); + result = await vectorStoreFileBatchResponse.UpdateAsync(cancellationToken: chainedCts.Token).ConfigureAwait(false); + } while (result.Status is VectorStoreFileStatus.NotStarted or VectorStoreFileStatus.InProgress or VectorStoreFileStatus.Cancelling); + return result; + } + } +} diff --git a/OpenAI-DotNet/VectorStores/VectorStoreFileBatchResponse.cs b/OpenAI-DotNet/VectorStores/VectorStoreFileBatchResponse.cs new file mode 100644 index 00000000..91f2b8ad --- /dev/null +++ b/OpenAI-DotNet/VectorStores/VectorStoreFileBatchResponse.cs @@ -0,0 +1,59 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System; +using System.Text.Json.Serialization; + +namespace OpenAI.VectorStores +{ + /// + /// A batch of files attached to a vector store. + /// + public sealed class VectorStoreFileBatchResponse : BaseResponse + { + /// + /// The identifier, which can be referenced in API endpoints. + /// + [JsonInclude] + [JsonPropertyName("id")] + public string Id { get; private set; } + + /// + /// The object type, which is always `vector_store.file_batch`. 
+ /// + [JsonInclude] + [JsonPropertyName("object")] + public string Object { get; private set; } + + /// + /// The Unix timestamp (in seconds) for when the vector store files batch was created. + /// + [JsonInclude] + [JsonPropertyName("created_at")] + public int CreatedAtUnixTimeSeconds { get; private set; } + + [JsonIgnore] + public DateTime CreatedAt => DateTimeOffset.FromUnixTimeSeconds(CreatedAtUnixTimeSeconds).DateTime; + + /// + /// The ID of the vector store that the files is attached to. + /// + [JsonInclude] + [JsonPropertyName("vector_store_id")] + public string VectorStoreId { get; private set; } + + /// + /// The status of the vector store files batch, which can be either `in_progress`, `completed`, `cancelled` or `failed`. + /// + [JsonInclude] + [JsonPropertyName("status")] + public VectorStoreFileStatus Status { get; private set; } + + [JsonInclude] + [JsonPropertyName("file_counts")] + public FileCounts FileCounts { get; private set; } + + public override string ToString() => Id; + + public static implicit operator string(VectorStoreFileBatchResponse response) => response?.ToString(); + } +} diff --git a/OpenAI-DotNet/VectorStores/VectorStoreFileResponse.cs b/OpenAI-DotNet/VectorStores/VectorStoreFileResponse.cs new file mode 100644 index 00000000..ce3cff83 --- /dev/null +++ b/OpenAI-DotNet/VectorStores/VectorStoreFileResponse.cs @@ -0,0 +1,77 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System; +using System.Text.Json.Serialization; + +namespace OpenAI.VectorStores +{ + /// + /// A list of files attached to a vector store. + /// + public sealed class VectorStoreFileResponse : BaseResponse + { + /// + /// The identifier, which can be referenced in API endpoints. + /// + [JsonInclude] + [JsonPropertyName("id")] + public string Id { get; private set; } + + /// + /// The object type, which is always 'vector_store.file'. 
+ /// + [JsonInclude] + [JsonPropertyName("object")] + public string Object { get; private set; } + + /// + /// The total vector store usage in bytes. Note that this may be different from the original file size. + /// + [JsonInclude] + [JsonPropertyName("usage_bytes")] + public long UsageBytes { get; private set; } + + /// + /// The Unix timestamp (in seconds) for when the vector store file was created. + /// + [JsonInclude] + [JsonPropertyName("created_at")] + public int CreatedAtUnixTimeSeconds { get; private set; } + + [JsonIgnore] + public DateTimeOffset CreatedAt => DateTimeOffset.FromUnixTimeSeconds(CreatedAtUnixTimeSeconds); + + /// + /// The ID of the vector store that the file is attached to. + /// + [JsonInclude] + [JsonPropertyName("vector_store_id")] + public string VectorStoreId { get; private set; } + + /// + /// The status of the vector store file, which can be either 'in_progress', 'completed', 'cancelled', or 'failed'. + /// The status 'completed' indicates that the vector store file is ready for use. + /// + [JsonInclude] + [JsonPropertyName("status")] + public VectorStoreFileStatus Status { get; private set; } + + /// + /// The last error associated with this vector store file. Will be 'null' if there are no errors. + /// + [JsonInclude] + [JsonPropertyName("last_error")] + public Error LastError { get; private set; } + + /// + /// The strategy used to chunk the file. + /// + [JsonInclude] + [JsonPropertyName("chunking_strategy")] + public ChunkingStrategy ChunkingStrategy { get; private set; } + + public override string ToString() => Id; + + public static implicit operator string(VectorStoreFileResponse response) => response?.ToString(); + } +} diff --git a/OpenAI-DotNet/VectorStores/VectorStoreFileStatus.cs b/OpenAI-DotNet/VectorStores/VectorStoreFileStatus.cs new file mode 100644 index 00000000..4657455b --- /dev/null +++ b/OpenAI-DotNet/VectorStores/VectorStoreFileStatus.cs @@ -0,0 +1,21 @@ +// Licensed under the MIT License. 
See LICENSE in the project root for license information. + +using System.Runtime.Serialization; + +namespace OpenAI.VectorStores +{ + public enum VectorStoreFileStatus + { + NotStarted = 0, + [EnumMember(Value = "in_progress")] + InProgress, + [EnumMember(Value = "cancelling")] + Cancelling, + [EnumMember(Value = "cancelled")] + Cancelled, + [EnumMember(Value = "completed")] + Completed, + [EnumMember(Value = "failed")] + Failed, + } +} diff --git a/OpenAI-DotNet/VectorStores/VectorStoreResponse.cs b/OpenAI-DotNet/VectorStores/VectorStoreResponse.cs new file mode 100644 index 00000000..2e8d8e62 --- /dev/null +++ b/OpenAI-DotNet/VectorStores/VectorStoreResponse.cs @@ -0,0 +1,110 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace OpenAI.VectorStores +{ + /// + /// A vector store is a collection of processed files can be used by the 'file_search' tool. + /// + public sealed class VectorStoreResponse : BaseResponse + { + /// + /// The identifier, which can be referenced in API endpoints. + /// + [JsonInclude] + [JsonPropertyName("id")] + public string Id { get; private set; } + + /// + /// The object type, which is always 'vector_store'. + /// + [JsonInclude] + [JsonPropertyName("object")] + public string Object { get; private set; } + + /// + /// The Unix timestamp (in seconds) for when the vector store was created. + /// + [JsonInclude] + [JsonPropertyName("created_at")] + public int CreatedAtUnixTimeSeconds { get; private set; } + + [JsonIgnore] + public DateTimeOffset CreatedAt => DateTimeOffset.FromUnixTimeSeconds(CreatedAtUnixTimeSeconds); + + /// + /// The name of the vector store. + /// + [JsonInclude] + [JsonPropertyName("name")] + public string Name { get; private set; } + + /// + /// The total number of bytes used by the files in the vector store. 
+ /// + [JsonInclude] + [JsonPropertyName("usage_bytes")] + public long UsageBytes { get; private set; } + + [JsonInclude] + [JsonPropertyName("file_counts")] + public FileCounts FileCounts { get; private set; } + + /// + /// The status of the vector store, which can be either 'expired', 'in_progress', or 'completed'. + /// A status of 'completed' indicates that the vector store is ready for use. + /// + [JsonInclude] + [JsonPropertyName("status")] + public VectorStoreStatus Status { get; private set; } + + /// + /// The expiration policy for a vector store. + /// + [JsonInclude] + [JsonPropertyName("expires_after")] + public ExpirationPolicy ExpirationPolicy { get; private set; } + + /// + /// The Unix timestamp (in seconds) for when the vector store will expire. + /// + [JsonInclude] + [JsonPropertyName("expires_at")] + public int? ExpiresAtUnixTimeSeconds { get; private set; } + + [JsonIgnore] + public DateTimeOffset? ExpiresAt + => ExpiresAtUnixTimeSeconds.HasValue + ? DateTimeOffset.FromUnixTimeSeconds(ExpiresAtUnixTimeSeconds.Value) + : null; + + /// + /// The Unix timestamp (in seconds) for when the vector store was last active. + /// + [JsonInclude] + [JsonPropertyName("last_active_at")] + public int? LastActiveAtUnixTimeSeconds { get; private set; } + + [JsonIgnore] + public DateTimeOffset? LastActiveAt + => LastActiveAtUnixTimeSeconds.HasValue + ? DateTimeOffset.FromUnixTimeSeconds(LastActiveAtUnixTimeSeconds.Value) + : null; + + /// + /// Set of 16 key-value pairs that can be attached to an object. + /// This can be useful for storing additional information about the object in a structured format. + /// Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. 
+ /// + [JsonInclude] + [JsonPropertyName("metadata")] + public Dictionary Metadata { get; private set; } + + public override string ToString() => Id; + + public static implicit operator string(VectorStoreResponse response) => response?.ToString(); + } +} diff --git a/OpenAI-DotNet/VectorStores/VectorStoreStatus.cs b/OpenAI-DotNet/VectorStores/VectorStoreStatus.cs new file mode 100644 index 00000000..0a387312 --- /dev/null +++ b/OpenAI-DotNet/VectorStores/VectorStoreStatus.cs @@ -0,0 +1,17 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using System.Runtime.Serialization; + +namespace OpenAI.VectorStores +{ + public enum VectorStoreStatus + { + NotStarted = 0, + [EnumMember(Value = "in_progress")] + InProgress, + [EnumMember(Value = "completed")] + Completed, + [EnumMember(Value = "failed")] + Expired + } +} diff --git a/OpenAI-DotNet/VectorStores/VectorStoresEndpoint.cs b/OpenAI-DotNet/VectorStores/VectorStoresEndpoint.cs new file mode 100644 index 00000000..d863720b --- /dev/null +++ b/OpenAI-DotNet/VectorStores/VectorStoresEndpoint.cs @@ -0,0 +1,299 @@ +// Licensed under the MIT License. See LICENSE in the project root for license information. + +using OpenAI.Extensions; +using OpenAI.Files; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; + +namespace OpenAI.VectorStores +{ + /// + /// Vector stores are used to store files for use by the file_search tool. + /// + /// + public sealed class VectorStoresEndpoint : OpenAIBaseEndpoint + { + public VectorStoresEndpoint(OpenAIClient client) : base(client) { } + + protected override string Root => "vector_stores"; + + /// + /// Creates a new Vector Store. + /// + /// . + /// Optional . + /// . 
+ public async Task CreateVectorStoreAsync(CreateVectorStoreRequest request, CancellationToken cancellationToken = default) + { + using var payload = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await client.Client.PostAsync(GetUrl(), payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client); + } + + /// + /// Returns a list of vector stores. + /// + /// Optional, . + /// Optional, . + /// . + public async Task> ListVectorStoresAsync(ListQuery query = null, CancellationToken cancellationToken = default) + { + using var response = await client.Client.GetAsync(GetUrl(queryParameters: query), cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); + return response.Deserialize>(responseAsString, client); + } + + /// + /// Get a vector store. + /// + /// + /// The ID of the vector store to retrieve. + /// + /// Optional, . + /// . + public async Task GetVectorStoreAsync(string vectorStoreId, CancellationToken cancellationToken = default) + { + using var response = await client.Client.GetAsync(GetUrl($"/{vectorStoreId}"), cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client); + } + + /// + /// Modifies a vector store. + /// + /// + /// The ID of the vector store to retrieve. + /// + /// + /// Optional, name of the vector store. + /// + /// + /// The number of days after the anchor time that the vector store will expire. + /// + /// + /// Set of 16 key-value pairs that can be attached to an object. 
+ /// This can be useful for storing additional information about the object in a structured format. + /// Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. + /// + /// Optional, . + /// . + public async Task ModifyVectorStoreAsync(string vectorStoreId, string name = null, int? expiresAfter = null, IReadOnlyDictionary metadata = null, CancellationToken cancellationToken = default) + { + var expirationPolicy = expiresAfter.HasValue ? new ExpirationPolicy(expiresAfter.Value) : null; + var request = new { name, expires_after = expirationPolicy, metadata }; + using var payload = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await client.Client.PostAsync(GetUrl($"/{vectorStoreId}"), payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client); + } + + /// + /// Delete a vector store. + /// + /// + /// The ID of the vector store to retrieve. + /// + /// Optional, . + /// True, if the vector store was successfully deleted. + public async Task DeleteVectorStoreAsync(string vectorStoreId, CancellationToken cancellationToken = default) + { + using var response = await client.Client.DeleteAsync(GetUrl($"/{vectorStoreId}"), cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client)?.Deleted ?? false; + } + + #region Files + + /// + /// Create a vector store file by attaching a File to a vector store. + /// + /// The ID of the vector store that the file belongs to. + /// + /// A File ID that the vector store should use. + /// Useful for tools like file_search that can access files. + /// + /// + /// A file id that the vector store should use. 
Useful for tools like 'file_search' that can access files. + /// + /// Optional, . + /// . + public async Task CreateVectorStoreFileAsync(string vectorStoreId, string fileId, ChunkingStrategy chunkingStrategy = null, CancellationToken cancellationToken = default) + { + using var payload = JsonSerializer.Serialize(new { file_id = fileId, chunking_strategy = chunkingStrategy }, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await client.Client.PostAsync(GetUrl($"/{vectorStoreId}/files"), payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client); + } + + /// + /// Returns a list of vector store files. + /// + /// The ID of the vector store that the file belongs to. + /// Optional, . + /// Optional, Filter by file status filter. + /// Optional, . + /// . + public async Task> ListVectorStoreFilesAsync(string vectorStoreId, ListQuery query = null, VectorStoreFileStatus? filter = null, CancellationToken cancellationToken = default) + { + Dictionary queryParams = query; + + if (filter.HasValue) + { + queryParams ??= new(); + queryParams.Add("filter", $"{filter.Value}"); + } + + using var response = await client.Client.GetAsync(GetUrl($"/{vectorStoreId}/files", queryParams), cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); + return response.Deserialize>(responseAsString, client); + } + + /// + /// Retrieves a vector store file. + /// + /// The ID of the vector store that the file belongs to. + /// The ID of the file being retrieved. + /// Optional, . + /// . 
+ public async Task GetVectorStoreFileAsync(string vectorStoreId, string fileId, CancellationToken cancellationToken = default) + { + using var response = await client.Client.GetAsync(GetUrl($"/{vectorStoreId}/files/{fileId}"), cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client); + } + + /// + /// Delete a vector store file. + /// This will remove the file from the vector store but the file itself will not be deleted. + /// To delete the file, use the delete file endpoint. + /// + /// The ID of the vector store that the file belongs to. + /// The ID of the file being deleted. + /// Optional, . + /// True, if the vector store file was successfully deleted. + public async Task DeleteVectorStoreFileAsync(string vectorStoreId, string fileId, CancellationToken cancellationToken = default) + { + using var response = await client.Client.DeleteAsync(GetUrl($"/{vectorStoreId}/files/{fileId}"), cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client)?.Deleted ?? false; + } + + #endregion Files + + #region Batches + + /// + /// Create a vector store file batch. + /// + /// + /// The ID of the vector store for which to create a File Batch. + /// + /// + /// A list of File IDs that the vector store should use. Useful for tools like file_search that can access files. + /// + /// + /// A file id that the vector store should use. Useful for tools like 'file_search' that can access files. + /// + /// Optional, . + /// . 
+ public async Task CreateVectorStoreFileBatchAsync(string vectorStoreId, IReadOnlyList fileIds, ChunkingStrategy chunkingStrategy = null, CancellationToken cancellationToken = default) + { + if (fileIds is not { Count: not 0 }) { throw new ArgumentNullException(nameof(fileIds)); } + using var payload = JsonSerializer.Serialize(new { file_ids = fileIds, chunking_strategy = chunkingStrategy }, OpenAIClient.JsonSerializationOptions).ToJsonStringContent(); + using var response = await client.Client.PostAsync(GetUrl($"/{vectorStoreId}/file_batches"), payload, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client); + } + + /// + /// Create a vector store file batch. + /// + /// + /// The ID of the vector store for which to create a File Batch. + /// + /// + /// A list of Files that the vector store should use. Useful for tools like file_search that can access files. + /// + /// + /// A file id that the vector store should use. Useful for tools like 'file_search' that can access files. + /// + /// Optional, . + /// . + public async Task CreateVectorStoreFileBatchAsync(string vectorStoreId, IReadOnlyList files, ChunkingStrategy chunkingStrategy = null, CancellationToken cancellationToken = default) + => await CreateVectorStoreFileBatchAsync(vectorStoreId, files?.Select(file => file.Id).ToList(), chunkingStrategy, cancellationToken).ConfigureAwait(false); + + /// + /// Returns a list of vector store files in a batch. + /// + /// The ID of the vector store that the files belong to. + /// The ID of the file batch. + /// Optional, . + /// Optional, filter by file status. + /// Optional, . + /// . + public async Task> ListVectorStoreBatchFilesAsync(string vectorStoreId, string fileBatchId, ListQuery query = null, VectorStoreFileStatus? 
filter = null, CancellationToken cancellationToken = default) + { + Dictionary queryParams = query; + + if (filter != null) + { + queryParams ??= new(); + queryParams.Add("filter", $"{filter.Value}"); + } + + using var response = await client.Client.GetAsync(GetUrl($"/{vectorStoreId}/file_batches/{fileBatchId}/files", queryParams), cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); + return response.Deserialize>(responseAsString, client); + } + + /// + /// Retrieves a vector store file batch. + /// + /// The ID of the vector store that the files belong to. + /// The ID of the file batch being retrieved. + /// Optional, . + /// . + public async Task GetVectorStoreFileBatchAsync(string vectorStoreId, string fileBatchId, CancellationToken cancellationToken = default) + { + using var response = await client.Client.GetAsync(GetUrl($"/{vectorStoreId}/file_batches/{fileBatchId}"), cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); + return response.Deserialize(responseAsString, client); + } + + /// + /// Cancel a vector store file batch. + /// This attempts to cancel the processing of files in this batch as soon as possible. + /// + /// The ID of the vector store that the files belong to. + /// The ID of the file batch being retrieved. + /// Optional, . + /// True, if the vector store file batch was cancelled, otherwise false. 
+ public async Task CancelVectorStoreFileBatchAsync(string vectorStoreId, string fileBatchId, CancellationToken cancellationToken = default) + { + using var response = await client.Client.PostAsync(GetUrl($"/{vectorStoreId}/file_batches/{fileBatchId}/cancel"), null!, cancellationToken).ConfigureAwait(false); + var responseAsString = await response.ReadAsStringAsync(EnableDebug, cancellationToken).ConfigureAwait(false); + var result = response.Deserialize(responseAsString, client); + + if (result.Status < VectorStoreFileStatus.Cancelling) + { + try + { + result = await result.WaitForStatusChangeAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + } + catch (Exception) + { + // ignored + } + } + + return result.Status >= VectorStoreFileStatus.Cancelling; + } + + #endregion Batches + } +} diff --git a/README.md b/README.md index c0748e20..e01e94c1 100644 --- a/README.md +++ b/README.md @@ -38,11 +38,11 @@ Install-Package OpenAI-DotNet > Check out our new api docs! -https://rageagainstthepixel.github.io/OpenAI-DotNet :new: + :new: ### Table of Contents -- [Authentication](#authentication) +- [Authentication](#authentication) :new: :warning: :construction: - [OpenAIClient](#handling-openaiclient-and-httpclient-lifecycle) - [Azure OpenAI](#azure-openai) - [Azure Active Directory Authentication](#azure-active-directory-authentication) @@ -51,41 +51,51 @@ https://rageagainstthepixel.github.io/OpenAI-DotNet :new: - [List Models](#list-models) - [Retrieve Models](#retrieve-model) - [Delete Fine Tuned Model](#delete-fine-tuned-model) -- [Assistants](#assistants) +- [Assistants](#assistants) :new: :warning: :construction: - [List Assistants](#list-assistants) - [Create Assistant](#create-assistant) - [Retrieve Assistant](#retrieve-assistant) - [Modify Assistant](#modify-assistant) - [Delete Assistant](#delete-assistant) - - [List Assistant Files](#list-assistant-files) - - [Attach File to Assistant](#attach-file-to-assistant) - - [Upload File to 
Assistant](#upload-file-to-assistant) - - [Retrieve File from Assistant](#retrieve-file-from-assistant) - - [Remove File from Assistant](#remove-file-from-assistant) - - [Delete File from Assistant](#delete-file-from-assistant) -- [Threads](#threads) - - [Create Thread](#create-thread) - - [Create Thread and Run](#create-thread-and-run) - - [Retrieve Thread](#retrieve-thread) - - [Modify Thread](#modify-thread) - - [Delete Thread](#delete-thread) - - [Thread Messages](#thread-messages) - - [List Messages](#list-thread-messages) - - [Create Message](#create-thread-message) - - [Retrieve Message](#retrieve-thread-message) - - [Modify Message](#modify-thread-message) - - [Thread Message Files](#thread-message-files) - - [List Message Files](#list-thread-message-files) - - [Retrieve Message File](#retrieve-thread-message-file) - - [Thread Runs](#thread-runs) - - [List Runs](#list-thread-runs) - - [Create Run](#create-thread-run) - - [Retrieve Run](#retrieve-thread-run) - - [Modify Run](#modify-thread-run) - - [Submit Tool Outputs to Run](#thread-submit-tool-outputs-to-run) - - [List Run Steps](#list-thread-run-steps) - - [Retrieve Run Step](#retrieve-thread-run-step) - - [Cancel Run](#cancel-thread-run) + - [Assistant Streaming](#assistant-streaming) :new: + - [Threads](#threads) :new: :warning: :construction: + - [Create Thread](#create-thread) + - [Create Thread and Run](#create-thread-and-run) + - [Streaming](#create-thread-and-run-streaming) :new: + - [Retrieve Thread](#retrieve-thread) + - [Modify Thread](#modify-thread) + - [Delete Thread](#delete-thread) + - [Thread Messages](#thread-messages) + - [List Messages](#list-thread-messages) + - [Create Message](#create-thread-message) + - [Retrieve Message](#retrieve-thread-message) + - [Modify Message](#modify-thread-message) + - [Thread Runs](#thread-runs) + - [List Runs](#list-thread-runs) + - [Create Run](#create-thread-run) + - [Streaming](#create-thread-run-streaming) :new: + - [Retrieve 
Run](#retrieve-thread-run) + - [Modify Run](#modify-thread-run) + - [Submit Tool Outputs to Run](#thread-submit-tool-outputs-to-run) + - [List Run Steps](#list-thread-run-steps) + - [Retrieve Run Step](#retrieve-thread-run-step) + - [Cancel Run](#cancel-thread-run) + - [Vector Stores](#vector-stores) :new: + - [List Vector Stores](#list-vector-stores) :new: + - [Create Vector Store](#create-vector-store) :new: + - [Retrieve Vector Store](#retrieve-vector-store) :new: + - [Modify Vector Store](#modify-vector-store) :new: + - [Delete Vector Store](#delete-vector-store) :new: + - [Vector Store Files](#vector-store-files) :new: + - [List Vector Store Files](#list-vector-store-files) :new: + - [Create Vector Store File](#create-vector-store-file) :new: + - [Retrieve Vector Store File](#retrieve-vector-store-file) :new: + - [Delete Vector Store File](#delete-vector-store-file) :new: + - [Vector Store File Batches](#vector-store-file-batches) :new: + - [Create Vector Store File Batch](#create-vector-store-file-batch) :new: + - [Retrieve Vector Store File Batch](#retrieve-vector-store-file-batch) :new: + - [List Files In Vector Store Batch](#list-files-in-vector-store-batch) :new: + - [Cancel Vector Store File Batch](#cancel-vector-store-file-batch) :new: - [Chat](#chat) - [Chat Completions](#chat-completions) - [Streaming](#chat-streaming) @@ -96,7 +106,7 @@ https://rageagainstthepixel.github.io/OpenAI-DotNet :new: - [Create Speech](#create-speech) - [Create Transcription](#create-transcription) - [Create Translation](#create-translation) -- [Images](#images) +- [Images](#images) :warning: :construction: - [Create Image](#create-image) - [Edit Image](#edit-image) - [Create Image Variation](#create-image-variation) @@ -112,6 +122,11 @@ https://rageagainstthepixel.github.io/OpenAI-DotNet :new: - [Retrieve Fine Tune Job Info](#retrieve-fine-tune-job-info) - [Cancel Fine Tune Job](#cancel-fine-tune-job) - [List Fine Tune Job Events](#list-fine-tune-job-events) +- 
[Batches](#batches) :new: + - [List Batches](#list-batches) :new: + - [Create Batch](#create-batch) :new: + - [Retrieve Batch](#retrieve-batch) :new: + - [Cancel Batch](#cancel-batch) :new: - [Embeddings](#embeddings) - [Create Embedding](#create-embeddings) - [Moderations](#moderations) @@ -121,7 +136,10 @@ https://rageagainstthepixel.github.io/OpenAI-DotNet :new: There are 3 ways to provide your API keys, in order of precedence: -1. [Pass keys directly with constructor](#pass-keys-directly-with-constructor) +> [!WARNING] +> We recommended using the environment variables to load the API key instead of having it hard coded in your source. It is not recommended use this method in production, but only for accepting user credentials, local testing and quick start scenarios. + +1. [Pass keys directly with constructor](#pass-keys-directly-with-constructor) :warning: 2. [Load key from configuration file](#load-key-from-configuration-file) 3. [Use System Environment Variables](#use-system-environment-variables) @@ -129,7 +147,8 @@ You use the `OpenAIAuthentication` when you initialize the API as shown: #### Pass keys directly with constructor -:warning: We recommended using the environment variables to load the API key instead of having it hard coded in your source. It is not recommended use this method in production, but only for accepting user credentials, local testing and quick start scenarios. +> [!WARNING] +> We recommended using the environment variables to load the API key instead of having it hard coded in your source. It is not recommended use this method in production, but only for accepting user credentials, local testing and quick start scenarios. 
```csharp using var api = new OpenAIClient("sk-apiKey"); @@ -138,7 +157,7 @@ using var api = new OpenAIClient("sk-apiKey"); Or create a `OpenAIAuthentication` object manually ```csharp -using var api = new OpenAIClient(new OpenAIAuthentication("sk-apiKey", "org-yourOrganizationId")); +using var api = new OpenAIClient(new OpenAIAuthentication("sk-apiKey", "org-yourOrganizationId", "proj_yourProjectId")); ``` #### Load key from configuration file @@ -147,22 +166,25 @@ Attempts to load api keys from a configuration file, by default `.openai` in the To create a configuration file, create a new text file named `.openai` and containing the line: -> Organization entry is optional. +> [!NOTE] +> Organization and project id entries are optional. ##### Json format ```json { "apiKey": "sk-aaaabbbbbccccddddd", - "organization": "org-yourOrganizationId" + "organizationId": "org-yourOrganizationId", + "projectId": "proj_yourProjectId" } ``` ##### Deprecated format ```shell -OPENAI_KEY=sk-aaaabbbbbccccddddd -ORGANIZATION=org-yourOrganizationId +OPENAI_API_KEY=sk-aaaabbbbbccccddddd +OPENAI_ORGANIZATION_ID=org-yourOrganizationId +OPENAI_PROJECT_ID=proj_yourProjectId ``` You can also load the configuration file directly with known path by calling static methods in `OpenAIAuthentication`: @@ -185,6 +207,7 @@ Use your system's environment variables specify an api key and organization to u - Use `OPENAI_API_KEY` for your api key. - Use `OPENAI_ORGANIZATION_ID` to specify an organization. +- Use `OPENAI_PROJECT_ID` to specify a project. 
```csharp using var api = new OpenAIClient(OpenAIAuthentication.LoadFromEnv()); @@ -364,7 +387,7 @@ Retrieves a model instance, providing basic information about the model such as ```csharp using var api = new OpenAIClient(); -var model = await api.ModelsEndpoint.GetModelDetailsAsync("gpt-4-turbo"); +var model = await api.ModelsEndpoint.GetModelDetailsAsync("gpt-4o"); Console.WriteLine(model.ToString()); ``` @@ -380,7 +403,8 @@ Assert.IsTrue(isDeleted); ### [Assistants](https://platform.openai.com/docs/api-reference/assistants) -> :warning: Beta Feature +> [!WARNING] +> Beta Feature. API subject to breaking changes. Build assistants that can call models and use tools to perform tasks. @@ -409,7 +433,7 @@ Create an assistant with a model and instructions. ```csharp using var api = new OpenAIClient(); -var request = new CreateAssistantRequest(Model.GPT4_Turbo); +var request = new CreateAssistantRequest(Model.GPT4o); var assistant = await api.AssistantsEndpoint.CreateAssistantAsync(request); ``` @@ -429,9 +453,9 @@ Modifies an assistant. ```csharp using var api = new OpenAIClient(); -var createRequest = new CreateAssistantRequest(Model.GPT3_5_Turbo); +var createRequest = new CreateAssistantRequest(Model.GPT4_Turbo); var assistant = await api.AssistantsEndpoint.CreateAssistantAsync(createRequest); -var modifyRequest = new CreateAssistantRequest(Model.GPT4_Turbo); +var modifyRequest = new CreateAssistantRequest(Model.GPT4o); var modifiedAssistant = await api.AssistantsEndpoint.ModifyAssistantAsync(assistant.Id, modifyRequest); // OR AssistantExtension for easier use! var modifiedAssistantEx = await assistant.ModifyAsync(modifyRequest); @@ -449,97 +473,18 @@ var isDeleted = await assistant.DeleteAsync(); Assert.IsTrue(isDeleted); ``` -#### [List Assistant Files](https://platform.openai.com/docs/api-reference/assistants/listAssistantFiles) - -Returns a list of assistant files. 
- -```csharp -using var api = new OpenAIClient(); -var filesList = await api.AssistantsEndpoint.ListFilesAsync("assistant-id"); -// OR AssistantExtension for easier use! -var filesList = await assistant.ListFilesAsync(); - -foreach (var file in filesList.Items) -{ - Console.WriteLine($"{file.AssistantId}'s file -> {file.Id}"); -} -``` - -#### [Attach File to Assistant](https://platform.openai.com/docs/api-reference/assistants/createAssistantFile) - -Create an assistant file by attaching a File to an assistant. - -```csharp -using var api = new OpenAIClient(); -var filePath = "assistant_test_2.txt"; -await File.WriteAllTextAsync(filePath, "Knowledge is power!"); -var fileUploadRequest = new FileUploadRequest(filePath, "assistant"); -var file = await api.FilesEndpoint.UploadFileAsync(fileUploadRequest); -var assistantFile = await api.AssistantsEndpoint.AttachFileAsync("assistant-id", file.Id); -// OR use extension method for convenience! -var assistantFIle = await assistant.AttachFileAsync(file); -``` - -#### [Upload File to Assistant](#upload-file) - -Uploads ***and*** attaches a file to an assistant. - -> Assistant extension method, for extra convenience! - -```csharp -using var api = new OpenAIClient(); -var filePath = "assistant_test_2.txt"; -await File.WriteAllTextAsync(filePath, "Knowledge is power!"); -var assistantFile = await assistant.UploadFileAsync(filePath); -``` - -#### [Retrieve File from Assistant](https://platform.openai.com/docs/api-reference/assistants/getAssistantFile) - -Retrieves an AssistantFile. - -```csharp -using var api = new OpenAIClient(); -var assistantFile = await api.AssistantsEndpoint.RetrieveFileAsync("assistant-id", "file-id"); -// OR AssistantExtension for easier use! 
-var assistantFile = await assistant.RetrieveFileAsync(fileId); -Console.WriteLine($"{assistantFile.AssistantId}'s file -> {assistantFile.Id}"); -``` - -#### [Remove File from Assistant](https://platform.openai.com/docs/api-reference/assistants/deleteAssistantFile) - -Remove a file from an assistant. - -> Note: The file will remain in your organization until [deleted with FileEndpoint](#delete-file). - -```csharp -using var api = new OpenAIClient(); -var isRemoved = await api.AssistantsEndpoint.RemoveFileAsync("assistant-id", "file-id"); -// OR use extension method for convenience! -var isRemoved = await assistant.RemoveFileAsync("file-id"); -Assert.IsTrue(isRemoved); -``` - -#### [Delete File from Assistant](#delete-file) - -Removes a file from the assistant and then deletes the file from the organization. - -> Assistant extension method, for extra convenience! - -```csharp -using var api = new OpenAIClient(); -var isDeleted = await assistant.DeleteFileAsync("file-id"); -Assert.IsTrue(isDeleted); -``` +#### [Assistant Streaming](https://platform.openai.com/docs/api-reference/assistants-streaming) -### [Threads](https://platform.openai.com/docs/api-reference/threads) +> [!NOTE] +> Assistant stream events can be easily added to existing thread calls by passing `Action streamEventHandler` callback to any existing method that supports streaming. -> :warning: Beta Feature +#### [Threads](https://platform.openai.com/docs/api-reference/threads) Create Threads that [Assistants](#assistants) can interact with. The Threads API is accessed via `OpenAIClient.ThreadsEndpoint` -#### [Create Thread](https://platform.openai.com/docs/api-reference/threads/createThread) +##### [Create Thread](https://platform.openai.com/docs/api-reference/threads/createThread) Create a thread. 
@@ -549,7 +494,7 @@ var thread = await api.ThreadsEndpoint.CreateThreadAsync(); Console.WriteLine($"Create thread {thread.Id} -> {thread.CreatedAt}"); ``` -#### [Create Thread and Run](https://platform.openai.com/docs/api-reference/runs/createThreadAndRun) +##### [Create Thread and Run](https://platform.openai.com/docs/api-reference/runs/createThreadAndRun) Create a thread and run it in one request. @@ -561,14 +506,62 @@ var assistant = await api.AssistantsEndpoint.CreateAssistantAsync( new CreateAssistantRequest( name: "Math Tutor", instructions: "You are a personal math tutor. Answer questions briefly, in a sentence or less.", - model: Model.GPT4_Turbo)); + model: Model.GPT4o)); var messages = new List { "I need to solve the equation `3x + 11 = 14`. Can you help me?" }; var threadRequest = new CreateThreadRequest(messages); var run = await assistant.CreateThreadAndRunAsync(threadRequest); Console.WriteLine($"Created thread and run: {run.ThreadId} -> {run.Id} -> {run.CreatedAt}"); ``` -#### [Retrieve Thread](https://platform.openai.com/docs/api-reference/threads/getThread) +###### Create Thread and Run Streaming + +Create a thread and run it in one request while streaming events. + +```csharp +using var api = new OpenAIClient(); +var tools = new List +{ + Tool.GetOrCreateTool(typeof(WeatherService), nameof(WeatherService.GetCurrentWeatherAsync)) +}; +var assistantRequest = new CreateAssistantRequest(tools: tools, instructions: "You are a helpful weather assistant. 
Use the appropriate unit based on geographical location."); +var assistant = await api.AssistantsEndpoint.CreateAssistantAsync(assistantRequest); +ThreadResponse thread = null; +async void StreamEventHandler(IServerSentEvent streamEvent) +{ + switch (streamEvent) + { + case ThreadResponse threadResponse: + thread = threadResponse; + break; + case RunResponse runResponse: + if (runResponse.Status == RunStatus.RequiresAction) + { + var toolOutputs = await assistant.GetToolOutputsAsync(runResponse); + + foreach (var toolOutput in toolOutputs) + { + Console.WriteLine($"Tool Output: {toolOutput}"); + } + + await runResponse.SubmitToolOutputsAsync(toolOutputs, StreamEventHandler); + } + break; + default: + Console.WriteLine(streamEvent.ToJsonString()); + break; + } +} + +var run = await assistant.CreateThreadAndRunAsync("I'm in Kuala-Lumpur, please tell me what's the temperature now?", StreamEventHandler); +run = await run.WaitForStatusChangeAsync(); +var messages = await thread.ListMessagesAsync(); +foreach (var response in messages.Items.Reverse()) +{ + Console.WriteLine($"{response.Role}: {response.PrintContent()}"); +} +``` + +##### [Retrieve Thread](https://platform.openai.com/docs/api-reference/threads/getThread) Retrieves a thread. @@ -580,7 +573,7 @@ thread = await thread.UpdateAsync(); Console.WriteLine($"Retrieve thread {thread.Id} -> {thread.CreatedAt}"); ``` -#### [Modify Thread](https://platform.openai.com/docs/api-reference/threads/modifyThread) +##### [Modify Thread](https://platform.openai.com/docs/api-reference/threads/modifyThread) Modifies a thread. @@ -599,7 +592,7 @@ thread = await thread.ModifyAsync(metadata); Console.WriteLine($"Modify thread {thread.Id} -> {thread.Metadata["key"]}"); ``` -#### [Delete Thread](https://platform.openai.com/docs/api-reference/threads/deleteThread) +##### [Delete Thread](https://platform.openai.com/docs/api-reference/threads/deleteThread) Delete a thread. 
@@ -611,11 +604,11 @@ var isDeleted = await thread.DeleteAsync(); Assert.IsTrue(isDeleted); ``` -#### [Thread Messages](https://platform.openai.com/docs/api-reference/messages) +##### [Thread Messages](https://platform.openai.com/docs/api-reference/messages) Create messages within threads. -##### [List Thread Messages](https://platform.openai.com/docs/api-reference/messages/listMessages) +###### [List Thread Messages](https://platform.openai.com/docs/api-reference/messages/listMessages) Returns a list of messages for a given thread. @@ -631,7 +624,7 @@ foreach (var message in messageList.Items) } ``` -##### [Create Thread Message](https://platform.openai.com/docs/api-reference/messages/createMessage) +###### [Create Thread Message](https://platform.openai.com/docs/api-reference/messages/createMessage) Create a message. @@ -645,7 +638,7 @@ var message = await thread.CreateMessageAsync("Hello World!"); Console.WriteLine($"{message.Id}: {message.Role}: {message.PrintContent()}"); ``` -##### [Retrieve Thread Message](https://platform.openai.com/docs/api-reference/messages/getMessage) +###### [Retrieve Thread Message](https://platform.openai.com/docs/api-reference/messages/getMessage) Retrieve a message. @@ -658,7 +651,7 @@ var message = await message.UpdateAsync(); Console.WriteLine($"{message.Id}: {message.Role}: {message.PrintContent()}"); ``` -##### [Modify Thread Message](https://platform.openai.com/docs/api-reference/messages/modifyMessage) +###### [Modify Thread Message](https://platform.openai.com/docs/api-reference/messages/modifyMessage) Modify a message. @@ -676,42 +669,11 @@ var message = await message.ModifyAsync(metadata); Console.WriteLine($"Modify message metadata: {message.Id} -> {message.Metadata["key"]}"); ``` -##### Thread Message Files - -###### [List Thread Message Files](https://platform.openai.com/docs/api-reference/messages/listMessageFiles) - -Returns a list of message files. 
- -```csharp -using var api = new OpenAIClient(); -var fileList = await api.ThreadsEndpoint.ListFilesAsync("thread-id", "message-Id"); -// OR use extension method for convenience! -var fileList = await thread.ListFilesAsync("message-id"); -var fileList = await message.ListFilesAsync(); - -foreach (var file in fileList.Items) -{ - Console.WriteLine(file.Id); -} -``` - -###### [Retrieve Thread Message File](https://platform.openai.com/docs/api-reference/messages/getMessageFile) - -Retrieves a message file. - -```csharp -using var api = new OpenAIClient(); -var file = await api.ThreadsEndpoint.RetrieveFileAsync("thread-id", "message-id", "file-id"); -// OR use extension method for convenience! -var file = await message.RetrieveFileAsync(); -Console.WriteLine(file.Id); -``` - -#### [Thread Runs](https://platform.openai.com/docs/api-reference/runs) +##### [Thread Runs](https://platform.openai.com/docs/api-reference/runs) Represents an execution run on a thread. -##### [List Thread Runs](https://platform.openai.com/docs/api-reference/runs/listRuns) +###### [List Thread Runs](https://platform.openai.com/docs/api-reference/runs/listRuns) Returns a list of runs belonging to a thread. @@ -727,7 +689,7 @@ foreach (var run in runList.Items) } ``` -##### [Create Thread Run](https://platform.openai.com/docs/api-reference/runs/createRun) +###### [Create Thread Run](https://platform.openai.com/docs/api-reference/runs/createRun) Create a run. @@ -737,14 +699,40 @@ var assistant = await api.AssistantsEndpoint.CreateAssistantAsync( new CreateAssistantRequest( name: "Math Tutor", instructions: "You are a personal math tutor. Answer questions briefly, in a sentence or less.", - model: Model.GPT4_Turbo)); + model: Model.GPT4o)); var thread = await api.ThreadsEndpoint.CreateThreadAsync(); var message = await thread.CreateMessageAsync("I need to solve the equation `3x + 11 = 14`. 
Can you help me?"); var run = await thread.CreateRunAsync(assistant); Console.WriteLine($"[{run.Id}] {run.Status} | {run.CreatedAt}"); ``` -##### [Retrieve Thread Run](https://platform.openai.com/docs/api-reference/runs/getRun) +###### Create Thread Run Streaming + +Create a run and stream the events. + +```csharp +using var api = new OpenAIClient(); +var assistant = await api.AssistantsEndpoint.CreateAssistantAsync( + new CreateAssistantRequest( + name: "Math Tutor", + instructions: "You are a personal math tutor. Answer questions briefly, in a sentence or less. Your responses should be formatted in JSON.", + model: Model.GPT4o, + responseFormat: ChatResponseFormat.Json)); +var thread = await api.ThreadsEndpoint.CreateThreadAsync(); +var message = await thread.CreateMessageAsync("I need to solve the equation `3x + 11 = 14`. Can you help me?"); +var run = await thread.CreateRunAsync(assistant, streamEvent => +{ + Console.WriteLine(streamEvent.ToJsonString()); +}); +var messages = await thread.ListMessagesAsync(); + +foreach (var response in messages.Items.Reverse()) +{ + Console.WriteLine($"{response.Role}: {response.PrintContent()}"); +} +``` + +###### [Retrieve Thread Run](https://platform.openai.com/docs/api-reference/runs/getRun) Retrieves a run. @@ -757,7 +745,7 @@ var run = await run.UpdateAsync(); Console.WriteLine($"[{run.Id}] {run.Status} | {run.CreatedAt}"); ``` -##### [Modify Thread Run](https://platform.openai.com/docs/api-reference/runs/modifyRun) +###### [Modify Thread Run](https://platform.openai.com/docs/api-reference/runs/modifyRun) Modifies a run. 
@@ -775,11 +763,14 @@ var run = await run.ModifyAsync(metadata); Console.WriteLine($"Modify run {run.Id} -> {run.Metadata["key"]}"); ``` -##### [Thread Submit Tool Outputs to Run](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) +###### [Thread Submit Tool Outputs to Run](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) When a run has the status: `requires_action` and `required_action.type` is `submit_tool_outputs`, this endpoint can be used to submit the outputs from the tool calls once they're all completed. All outputs must be submitted in a single request. +> [!NOTE] +> See [Create Thread and Run Streaming](#create-thread-and-run-streaming) example on how to stream tool output events. + ```csharp using var api = new OpenAIClient(); var tools = new List @@ -789,18 +780,18 @@ var tools = new List // Or create a tool from a type and the name of the method you want to use for function calling Tool.GetOrCreateTool(typeof(WeatherService), nameof(WeatherService.GetCurrentWeatherAsync)), // Pass in an instance of an object to call a method on it - Tool.GetOrCreateTool(OpenAIClient.ImagesEndPoint, nameof(ImagesEndpoint.GenerateImageAsync))), + Tool.GetOrCreateTool(api.ImagesEndPoint, nameof(ImagesEndpoint.GenerateImageAsync)), // Define func<,> callbacks Tool.FromFunc("name_of_func", () => { /* callback function */ }), Tool.FromFunc("func_with_multiple_params", (t1, t2) => { /* logic that calculates return value */ return tResult; }) }; var assistantRequest = new CreateAssistantRequest(tools: tools, instructions: "You are a helpful weather assistant. 
Use the appropriate unit based on geographical location."); -var testAssistant = await OpenAIClient.AssistantsEndpoint.CreateAssistantAsync(assistantRequest); +var testAssistant = await api.AssistantsEndpoint.CreateAssistantAsync(assistantRequest); var run = await testAssistant.CreateThreadAndRunAsync("I'm in Kuala-Lumpur, please tell me what's the temperature now?"); // waiting while run is Queued and InProgress run = await run.WaitForStatusChangeAsync(); -// Invoke all the tool call functions and return the tool outputs. +// Invoke all of the tool call functions and return the tool outputs. var toolOutputs = await testAssistant.GetToolOutputsAsync(run.RequiredAction.SubmitToolOutputs.ToolCalls); foreach (var toolOutput in toolOutputs) @@ -819,7 +810,7 @@ foreach (var message in messages.Items.OrderBy(response => response.CreatedAt)) } ``` -##### [List Thread Run Steps](https://platform.openai.com/docs/api-reference/runs/listRunSteps) +###### [List Thread Run Steps](https://platform.openai.com/docs/api-reference/runs/listRunSteps) Returns a list of run steps belonging to a run. @@ -835,7 +826,7 @@ foreach (var runStep in runStepList.Items) } ``` -##### [Retrieve Thread Run Step](https://platform.openai.com/docs/api-reference/runs/getRunStep) +###### [Retrieve Thread Run Step](https://platform.openai.com/docs/api-reference/runs/getRunStep) Retrieves a run step. @@ -848,7 +839,7 @@ var runStep = await runStep.UpdateAsync(); Console.WriteLine($"[{runStep.Id}] {runStep.Status} {runStep.CreatedAt} -> {runStep.ExpiresAt}"); ``` -##### [Cancel Thread Run](https://platform.openai.com/docs/api-reference/runs/cancelRun) +###### [Cancel Thread Run](https://platform.openai.com/docs/api-reference/runs/cancelRun) Cancels a run that is `in_progress`. 
@@ -860,6 +851,172 @@ var isCancelled = await run.CancelAsync();
 Assert.IsTrue(isCancelled);
 ```
 
+#### [Vector Stores](https://platform.openai.com/docs/api-reference/vector-stores)
+
+Vector stores are used to store files for use by the `file_search` tool.
+
+- [File Search Guide](https://platform.openai.com/docs/assistants/tools/file-search)
+
+The Vector Stores API is accessed via `OpenAIClient.VectorStoresEndpoint`
+
+##### [List Vector Stores](https://platform.openai.com/docs/api-reference/vector-stores/list)
+
+Returns a list of vector stores.
+
+```csharp
+using var api = new OpenAIClient();
+var vectorStores = await api.VectorStoresEndpoint.ListVectorStoresAsync();
+
+foreach (var vectorStore in vectorStores.Items)
+{
+    Console.WriteLine(vectorStore);
+}
+```
+
+##### [Create Vector Store](https://platform.openai.com/docs/api-reference/vector-stores/create)
+
+Create a vector store.
+
+```csharp
+using var api = new OpenAIClient();
+var createVectorStoreRequest = new CreateVectorStoreRequest("test-vector-store");
+var vectorStore = await api.VectorStoresEndpoint.CreateVectorStoreAsync(createVectorStoreRequest);
+Console.WriteLine(vectorStore);
+```
+
+##### [Retrieve Vector Store](https://platform.openai.com/docs/api-reference/vector-stores/retrieve)
+
+Retrieves a vector store.
+
+```csharp
+using var api = new OpenAIClient();
+var vectorStore = await api.VectorStoresEndpoint.GetVectorStoreAsync("vector-store-id");
+Console.WriteLine(vectorStore);
+```
+
+##### [Modify Vector Store](https://platform.openai.com/docs/api-reference/vector-stores/modify)
+
+Modifies a vector store.
+
+```csharp
+using var api = new OpenAIClient();
+var metadata = new Dictionary<string, object> { { "Test", DateTime.UtcNow } };
+var vectorStore = await api.VectorStoresEndpoint.ModifyVectorStoreAsync("vector-store-id", metadata: metadata);
+Console.WriteLine(vectorStore);
+```
+
+##### [Delete Vector Store](https://platform.openai.com/docs/api-reference/vector-stores/delete)
+
+Delete a vector store.
+
+```csharp
+using var api = new OpenAIClient();
+var isDeleted = await api.VectorStoresEndpoint.DeleteVectorStoreAsync("vector-store-id");
+Assert.IsTrue(isDeleted);
+```
+
+##### [Vector Store Files](https://platform.openai.com/docs/api-reference/vector-stores-files)
+
+Vector store files represent files inside a vector store.
+
+- [File Search Guide](https://platform.openai.com/docs/assistants/tools/file-search)
+
+###### [List Vector Store Files](https://platform.openai.com/docs/api-reference/vector-stores-files/listFiles)
+
+Returns a list of vector store files.
+
+```csharp
+using var api = new OpenAIClient();
+var files = await api.VectorStoresEndpoint.ListVectorStoreFilesAsync("vector-store-id");
+
+foreach (var file in files.Items)
+{
+    Console.WriteLine(file);
+}
+```
+
+###### [Create Vector Store File](https://platform.openai.com/docs/api-reference/vector-stores-files/createFile)
+
+Create a vector store file by attaching a file to a vector store.
+
+```csharp
+using var api = new OpenAIClient();
+var file = await api.VectorStoresEndpoint.CreateVectorStoreFileAsync("vector-store-id", "file-id", new ChunkingStrategy(ChunkingStrategyType.Static));
+Console.WriteLine(file);
+```
+
+###### [Retrieve Vector Store File](https://platform.openai.com/docs/api-reference/vector-stores-files/getFile)
+
+Retrieves a vector store file.
+ +```csharp +using var api = new OpenAIClient(); +var file = await api.VectorStoresEndpoint.GetVectorStoreFileAsync("vector-store-id", "vector-store-file-id"); +Console.WriteLine(file); +``` + +###### [Delete Vector Store File](https://platform.openai.com/docs/api-reference/vector-stores-files/deleteFile) + +Delete a vector store file. This will remove the file from the vector store but the file itself will not be deleted. To delete the file, use the delete file endpoint. + +```csharp +using var api = new OpenAIClient(); +var isDeleted = await api.VectorStoresEndpoint.DeleteVectorStoreFileAsync("vector-store-id", vectorStoreFile); +Assert.IsTrue(isDeleted); +``` + +##### [Vector Store File Batches](https://platform.openai.com/docs/api-reference/vector-stores-file-batches) + +Vector store files represent files inside a vector store. + +- [File Search Guide](https://platform.openai.com/docs/assistants/tools/file-search) + +###### [Create Vector Store File Batch](https://platform.openai.com/docs/api-reference/vector-stores-file-batches/createBatch) + +Create a vector store file batch. + +```csharp +using var api = new OpenAIClient(); +var files = new List { "file_id_1","file_id_2" }; +var vectorStoreFileBatch = await api.VectorStoresEndpoint.CreateVectorStoreFileBatchAsync("vector-store-id", files); +Console.WriteLine(vectorStoreFileBatch); +``` + +###### [Retrieve Vector Store File Batch](https://platform.openai.com/docs/api-reference/vector-stores-file-batches/getBatch) + +Retrieves a vector store file batch. + +```csharp +using var api = new OpenAIClient(); +var vectorStoreFileBatch = await api.VectorStoresEndpoint.GetVectorStoreFileBatchAsync("vector-store-id", "vector-store-file-batch-id"); +// you can also use convenience methods! 
+vectorStoreFileBatch = await vectorStoreFileBatch.UpdateAsync(); +vectorStoreFileBatch = await vectorStoreFileBatch.WaitForStatusChangeAsync(); +``` + +###### [List Files In Vector Store Batch](https://platform.openai.com/docs/api-reference/vector-stores-file-batches/listBatchFiles) + +Returns a list of vector store files in a batch. + +```csharp +using var api = new OpenAIClient(); +var files = await api.VectorStoresEndpoint.ListVectorStoreBatchFilesAsync("vector-store-id", "vector-store-file-batch-id"); + +foreach (var file in files.Items) +{ + Console.WriteLine(file); +} +``` + +###### [Cancel Vector Store File Batch](https://platform.openai.com/docs/api-reference/vector-stores-file-batches/cancelBatch) + +Cancel a vector store file batch. This attempts to cancel the processing of files in this batch as soon as possible. + +```csharp +using var api = new OpenAIClient(); +var isCancelled = await api.VectorStoresEndpoint.CancelVectorStoreFileBatchAsync("vector-store-id", "vector-store-file-batch-id"); +``` + ### [Chat](https://platform.openai.com/docs/api-reference/chat) Given a chat conversation, the model will return a chat completion response. @@ -879,7 +1036,7 @@ var messages = new List new Message(Role.Assistant, "The Los Angeles Dodgers won the World Series in 2020."), new Message(Role.User, "Where was it played?"), }; -var chatRequest = new ChatRequest(messages, Model.GPT4_Turbo); +var chatRequest = new ChatRequest(messages, Model.GPT4o); var response = await api.ChatEndpoint.GetCompletionAsync(chatRequest); var choice = response.FirstChoice; Console.WriteLine($"[{choice.Index}] {choice.Message.Role}: {choice.Message} | Finish Reason: {choice.FinishReason}"); @@ -1004,7 +1161,8 @@ foreach (var toolCall in response.FirstChoice.Message.ToolCalls) #### [Chat Vision](https://platform.openai.com/docs/guides/vision) -> :warning: Beta Feature +> [!WARNING] +> Beta Feature. API subject to breaking changes. 
```csharp using var api = new OpenAIClient(); @@ -1017,20 +1175,21 @@ var messages = new List new ImageUrl("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg", ImageDetail.Low) }) }; -var chatRequest = new ChatRequest(messages, model: Model.GPT4_Turbo); +var chatRequest = new ChatRequest(messages, model: Model.GPT4o); var response = await api.ChatEndpoint.GetCompletionAsync(chatRequest); Console.WriteLine($"{response.FirstChoice.Message.Role}: {response.FirstChoice.Message.Content} | Finish Reason: {response.FirstChoice.FinishDetails}"); ``` #### [Chat Json Mode](https://platform.openai.com/docs/guides/text-generation/json-mode) -> :warning: Beta Feature +> [!WARNING] +> Beta Feature. API subject to breaking changes. -Important notes: - -- When using JSON mode, always instruct the model to produce JSON via some message in the conversation, for example via your system message. If you don't include an explicit instruction to generate JSON, the model may generate an unending stream of whitespace and the request may run continually until it reaches the token limit. To help ensure you don't forget, the API will throw an error if the string "JSON" does not appear somewhere in the context. -- The JSON in the message the model returns may be partial (i.e. cut off) if `finish_reason` is length, which indicates the generation exceeded max_tokens or the conversation exceeded the token limit. To guard against this, check `finish_reason` before parsing the response. -- JSON mode will not guarantee the output matches any specific schema, only that it is valid and parses without errors. +> [!IMPORTANT] +> +> - When using JSON mode, always instruct the model to produce JSON via some message in the conversation, for example via your system message. 
If you don't include an explicit instruction to generate JSON, the model may generate an unending stream of whitespace and the request may run continually until it reaches the token limit. To help ensure you don't forget, the API will throw an error if the string "JSON" does not appear somewhere in the context. +> - The JSON in the message the model returns may be partial (i.e. cut off) if `finish_reason` is length, which indicates the generation exceeded max_tokens or the conversation exceeded the token limit. To guard against this, check `finish_reason` before parsing the response. +> - JSON mode will not guarantee the output matches any specific schema, only that it is valid and parses without errors. ```csharp var messages = new List @@ -1038,7 +1197,7 @@ var messages = new List new Message(Role.System, "You are a helpful assistant designed to output JSON."), new Message(Role.User, "Who won the world series in 2020?"), }; -var chatRequest = new ChatRequest(messages, Model.GPT4_Turbo, responseFormat: ChatResponseFormat.Json); +var chatRequest = new ChatRequest(messages, Model.GPT4o, responseFormat: ChatResponseFormat.Json); var response = await api.ChatEndpoint.GetCompletionAsync(chatRequest); foreach (var choice in response.Choices) @@ -1189,7 +1348,7 @@ The size of individual files can be a maximum of 512 MB. See the Assistants Tool ```csharp using var api = new OpenAIClient(); -var file = await api.FilesEndpoint.UploadFileAsync("path/to/your/file.jsonl", "fine-tune"); +var file = await api.FilesEndpoint.UploadFileAsync("path/to/your/file.jsonl", FilePurpose.FineTune); Console.WriteLine(file.Id); ``` @@ -1254,7 +1413,7 @@ List your organization's fine-tuning jobs. 
using var api = new OpenAIClient();
 var jobList = await api.FineTuningEndpoint.ListJobsAsync();
 
-foreach (var job in jobList.Items.OrderByDescending(job => job.CreatedAt)))
+foreach (var job in jobList.Items.OrderByDescending(job => job.CreatedAt))
 {
     Console.WriteLine($"{job.Id} -> {job.CreatedAt} | {job.Status}");
 }
 ```
@@ -1295,6 +1454,60 @@ foreach (var @event in eventList.Items.OrderByDescending(@event => @event.Create
 }
 ```
 
+### [Batches](https://platform.openai.com/docs/api-reference/batch)
+
+Create large batches of API requests for asynchronous processing. The Batch API returns completions within 24 hours for a 50% discount.
+
+- [Batch Guide](https://platform.openai.com/docs/guides/batch)
+
+The Batches API is accessed via `OpenAIClient.BatchEndpoint`
+
+#### [List Batches](https://platform.openai.com/docs/api-reference/batch/list)
+
+List your organization's batches.
+
+```csharp
+using var api = new OpenAIClient();
+var batches = await api.BatchEndpoint.ListBatchesAsync();
+
+foreach (var batch in batches.Items)
+{
+    Console.WriteLine(batch);
+}
+```
+
+#### [Create Batch](https://platform.openai.com/docs/api-reference/batch/create)
+
+Creates and executes a batch from an uploaded file of requests
+
+```csharp
+using var api = new OpenAIClient();
+var batchRequest = new CreateBatchRequest("file-id", Endpoint.ChatCompletions);
+var batch = await api.BatchEndpoint.CreateBatchAsync(batchRequest);
+```
+
+#### [Retrieve Batch](https://platform.openai.com/docs/api-reference/batch/retrieve)
+
+Retrieves a batch.
+
+```csharp
+using var api = new OpenAIClient();
+var batch = await api.BatchEndpoint.RetrieveBatchAsync("batch-id");
+// you can also use convenience methods!
+batch = await batch.UpdateAsync();
+batch = await batch.WaitForStatusChangeAsync();
+```
+
+#### [Cancel Batch](https://platform.openai.com/docs/api-reference/batch/cancel)
+
+Cancels an in-progress batch.
The batch will be in status cancelling for up to 10 minutes, before changing to cancelled, where it will have partial results (if any) available in the output file. + +```csharp +using var api = new OpenAIClient(); +var isCancelled = await api.BatchEndpoint.CancelBatchAsync(batch); +Assert.IsTrue(isCancelled); +``` + ### [Embeddings](https://platform.openai.com/docs/api-reference/embeddings) Get a vector representation of a given input that can be easily consumed by machine learning models and algorithms.