From 49ca9ec592d3e4868546cd1707194468a47af877 Mon Sep 17 00:00:00 2001
From: Kasper Marstal
Date: Thu, 21 Nov 2024 22:31:27 +0100
Subject: [PATCH 1/2] feat: Update Llamafile version, add larger Llamafile models (#64)

---
 src/Cellm/appsettings.Local.Anthropic.json     | 2 +-
 src/Cellm/appsettings.Local.Llamafile.GPU.json | 4 +++-
 src/Cellm/appsettings.Local.Llamafile.json     | 6 ++++--
 src/Cellm/appsettings.Local.Mistral.json       | 2 +-
 src/Cellm/appsettings.Local.OpenAi.json        | 2 +-
 5 files changed, 10 insertions(+), 6 deletions(-)

diff --git a/src/Cellm/appsettings.Local.Anthropic.json b/src/Cellm/appsettings.Local.Anthropic.json
index 646b425..e8f2ea5 100644
--- a/src/Cellm/appsettings.Local.Anthropic.json
+++ b/src/Cellm/appsettings.Local.Anthropic.json
@@ -1,7 +1,7 @@
 {
   "AnthropicConfiguration": {
     "DefaultModel": "claude-3-5-sonnet-20241022",
-    "ApiKey": "YOUR_ANTHROPIC_APIKEY"
+    "ApiKey": "YOUR_ANTHROPIC_API_KEY"
   },
   "CellmConfiguration": {
     "DefaultProvider": "Anthropic",
diff --git a/src/Cellm/appsettings.Local.Llamafile.GPU.json b/src/Cellm/appsettings.Local.Llamafile.GPU.json
index c5e4de6..2c84d7a 100644
--- a/src/Cellm/appsettings.Local.Llamafile.GPU.json
+++ b/src/Cellm/appsettings.Local.Llamafile.GPU.json
@@ -5,8 +5,10 @@
     "DefaultModel": "gemma-2-2b",
     "Models": {
       "gemma-2-2b": "https://huggingface.co/bartowski/gemma-2-2b-it-GGUF/resolve/main/gemma-2-2b-it-Q6_K.gguf",
+      "gemma-2-9b": "https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/resolve/main/gemma-2-9b-it-Q4_K_L.gguf",
       "llama-3.2-1b": "https://huggingface.co/bartowski/Llama-3.2-1B-Instruct-GGUF/resolve/main/Llama-3.2-1B-Instruct-Q6_K_L.gguf",
-      "llama-3.2-3b": "https://huggingface.co/bartowski/Llama-3.2-3B-Instruct-GGUF/resolve/main/Llama-3.2-3B-Instruct-Q4_K_L.gguf"
+      "llama-3.2-3b": "https://huggingface.co/bartowski/Llama-3.2-3B-Instruct-GGUF/resolve/main/Llama-3.2-3B-Instruct-Q4_K_L.gguf",
+      "ministral-8b": "https://huggingface.co/bartowski/Ministral-8B-Instruct-2410-GGUF/resolve/main/Ministral-8B-Instruct-2410-Q5_K_L.gguf"
     },
     "GPU": true,
     "GpuLayers": 999
diff --git a/src/Cellm/appsettings.Local.Llamafile.json b/src/Cellm/appsettings.Local.Llamafile.json
index bb50b19..c6474fb 100644
--- a/src/Cellm/appsettings.Local.Llamafile.json
+++ b/src/Cellm/appsettings.Local.Llamafile.json
@@ -1,12 +1,14 @@
 {
   "LlamafileConfiguration": {
-    "LlamafileUrl": "https://github.com/Mozilla-Ocho/llamafile/releases/download/0.8.13/llamafile-0.8.13",
+    "LlamafileUrl": "https://github.com/Mozilla-Ocho/llamafile/releases/download/0.8.16/llamafile-0.8.16",
     "BaseAddress": "http://127.0.0.1",
     "DefaultModel": "gemma-2-2b",
     "Models": {
       "gemma-2-2b": "https://huggingface.co/bartowski/gemma-2-2b-it-GGUF/resolve/main/gemma-2-2b-it-Q6_K.gguf",
+      "gemma-2-9b": "https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/resolve/main/gemma-2-9b-it-Q4_K_L.gguf",
       "llama-3.2-1b": "https://huggingface.co/bartowski/Llama-3.2-1B-Instruct-GGUF/resolve/main/Llama-3.2-1B-Instruct-Q6_K_L.gguf",
-      "llama-3.2-3b": "https://huggingface.co/bartowski/Llama-3.2-3B-Instruct-GGUF/resolve/main/Llama-3.2-3B-Instruct-Q4_K_L.gguf"
+      "llama-3.2-3b": "https://huggingface.co/bartowski/Llama-3.2-3B-Instruct-GGUF/resolve/main/Llama-3.2-3B-Instruct-Q4_K_L.gguf",
+      "ministral-8b": "https://huggingface.co/bartowski/Ministral-8B-Instruct-2410-GGUF/resolve/main/Ministral-8B-Instruct-2410-Q5_K_L.gguf"
     }
   },
   "CellmConfiguration": {
diff --git a/src/Cellm/appsettings.Local.Mistral.json b/src/Cellm/appsettings.Local.Mistral.json
index f00ceb0..4a64398 100644
--- a/src/Cellm/appsettings.Local.Mistral.json
+++ b/src/Cellm/appsettings.Local.Mistral.json
@@ -2,7 +2,7 @@
   "OpenAiConfiguration": {
     "BaseAddress": "https://api.mistral.ai",
     "DefaultModel": "mistral-small-latest",
-    "ApiKey": "YOUR_MISTRAL_APIKEY"
+    "ApiKey": "YOUR_MISTRAL_API_KEY"
   },
   "CellmConfiguration": {
     "DefaultProvider": "OpenAI",
diff --git a/src/Cellm/appsettings.Local.OpenAi.json b/src/Cellm/appsettings.Local.OpenAi.json
index 295db65..de345fc 100644
--- a/src/Cellm/appsettings.Local.OpenAi.json
+++ b/src/Cellm/appsettings.Local.OpenAi.json
@@ -1,7 +1,7 @@
 {
   "OpenAiConfiguration": {
     "DefaultModel": "gpt-4o-mini",
-    "ApiKey": "YOUR_OPENAI_APIKEY"
+    "ApiKey": "YOUR_OPENAI_API_KEY"
   },
   "CellmConfiguration": {
     "DefaultProvider": "OpenAI",

From 40652b9d1bb260e2ef9074f7ad318fbded685811 Mon Sep 17 00:00:00 2001
From: Kasper Marstal
Date: Thu, 21 Nov 2024 22:53:33 +0100
Subject: [PATCH 2/2] refactor: Clean up src/Cellm/AddIn (#65)

---
 .../AddIn/{Functions.cs => CellmFunctions.cs} | 27 ++-----------
 ...umentParser.cs => PromptArgumentParser.cs} | 33 ++++++++++---------
 .../{AddIn => Prompts}/SystemMessages.cs      |  2 +-
 src/Cellm/Services/ServiceLocator.cs          |  2 +-
 4 files changed, 26 insertions(+), 38 deletions(-)
 rename src/Cellm/AddIn/{Functions.cs => CellmFunctions.cs} (90%)
 rename src/Cellm/AddIn/{PromptWithArgumentParser.cs => PromptArgumentParser.cs} (82%)
 rename src/Cellm/{AddIn => Prompts}/SystemMessages.cs (94%)

diff --git a/src/Cellm/AddIn/Functions.cs b/src/Cellm/AddIn/CellmFunctions.cs
similarity index 90%
rename from src/Cellm/AddIn/Functions.cs
rename to src/Cellm/AddIn/CellmFunctions.cs
index 30e2dd3..648cfcf 100644
--- a/src/Cellm/AddIn/Functions.cs
+++ b/src/Cellm/AddIn/CellmFunctions.cs
@@ -10,7 +10,7 @@
 namespace Cellm.AddIn;
 
-public static class Functions
+public static class CellmFunctions
 {
     /// <summary>
     /// Sends a prompt to the default model configured in CellmConfiguration.
     /// </summary>
@@ -73,7 +73,7 @@ public static object PromptWith(
     {
         try
         {
-            var arguments = ServiceLocator.Get<PromptWithArgumentParser>()
+            var arguments = ServiceLocator.Get<PromptArgumentParser>()
                 .AddProvider(providerAndModel)
                 .AddModel(providerAndModel)
                 .AddInstructionsOrContext(instructionsOrContext)
@@ -88,8 +88,8 @@ public static object PromptWith(
 
             var prompt = new PromptBuilder()
                 .SetModel(arguments.Model)
-                .SetSystemMessage(SystemMessages.SystemMessage)
                 .SetTemperature(arguments.Temperature)
+                .AddSystemMessage(SystemMessages.SystemMessage)
                 .AddUserMessage(userMessage)
                 .Build();
 
@@ -102,6 +102,7 @@ public static object PromptWith(
         catch (CellmException ex)
         {
             SentrySdk.CaptureException(ex);
+            Debug.WriteLine(ex);
             return ex.Message;
         }
     }
@@ -117,22 +118,8 @@ public static object PromptWith(
 
     private static async Task<string> CallModelAsync(Prompt prompt, string? provider = null, Uri? baseAddress = null)
     {
-        try
-        {
-            var client = ServiceLocator.Get<IClient>();
-            var response = await client.Send(prompt, provider, baseAddress);
-            var content = response.Messages.Last().Content;
-            return content;
-        }
-        catch (CellmException ex)
-        {
-            Debug.WriteLine(ex);
-            throw;
-        }
-        catch (Exception ex)
-        {
-            Debug.WriteLine(ex);
-            throw new CellmException("An unexpected error occurred", ex);
-        }
+        var client = ServiceLocator.Get<IClient>();
+        var response = await client.Send(prompt, provider, baseAddress);
+        return response.Messages.Last().Content;
     }
 }
diff --git a/src/Cellm/AddIn/PromptWithArgumentParser.cs b/src/Cellm/AddIn/PromptArgumentParser.cs
similarity index 82%
rename from src/Cellm/AddIn/PromptWithArgumentParser.cs
rename to src/Cellm/AddIn/PromptArgumentParser.cs
index 3303fe6..80ca04d 100644
--- a/src/Cellm/AddIn/PromptWithArgumentParser.cs
+++ b/src/Cellm/AddIn/PromptArgumentParser.cs
@@ -1,5 +1,6 @@
 using System.Text;
 using Cellm.AddIn.Exceptions;
+using Cellm.Prompts;
 using ExcelDna.Integration;
 using Microsoft.Extensions.Configuration;
 using Microsoft.Office.Interop.Excel;
@@ -8,7 +9,7 @@
 namespace Cellm.AddIn;
 
 public record Arguments(string Provider, string Model, string Context, string Instructions, double Temperature);
-public class PromptWithArgumentParser
+public class PromptArgumentParser
 {
     private string? _provider;
     private string? _model;
@@ -18,12 +19,12 @@ public class PromptWithArgumentParser
 
     private readonly IConfiguration _configuration;
 
-    public PromptWithArgumentParser(IConfiguration configuration)
+    public PromptArgumentParser(IConfiguration configuration)
     {
         _configuration = configuration;
     }
 
-    public PromptWithArgumentParser AddProvider(object providerAndModel)
+    public PromptArgumentParser AddProvider(object providerAndModel)
     {
         _provider = providerAndModel switch
         {
@@ -35,7 +36,7 @@ public PromptWithArgumentParser AddProvider(object providerAndModel)
         return this;
     }
 
-    public PromptWithArgumentParser AddModel(object providerAndModel)
+    public PromptArgumentParser AddModel(object providerAndModel)
     {
         _model = providerAndModel switch
         {
@@ -47,21 +48,21 @@ public PromptWithArgumentParser AddModel(object providerAndModel)
         return this;
     }
 
-    public PromptWithArgumentParser AddInstructionsOrContext(object instructionsOrContext)
+    public PromptArgumentParser AddInstructionsOrContext(object instructionsOrContext)
     {
         _instructionsOrContext = instructionsOrContext;
 
         return this;
     }
 
-    public PromptWithArgumentParser AddInstructionsOrTemperature(object instructionsOrTemperature)
+    public PromptArgumentParser AddInstructionsOrTemperature(object instructionsOrTemperature)
     {
         _instructionsOrTemperature = instructionsOrTemperature;
 
         return this;
     }
 
-    public PromptWithArgumentParser AddTemperature(object temperature)
+    public PromptArgumentParser AddTemperature(object temperature)
     {
         _temperature = temperature;
 
@@ -92,17 +93,17 @@ public Arguments Parse()
             // "=PROMPT("Extract keywords", 0.7)
             (string instructions, double temperature, ExcelMissing) => new Arguments(provider, model, string.Empty, RenderInstructions(instructions), ParseTemperature(temperature)),
             // "=PROMPT(A1:B2)
-            (ExcelReference context, ExcelMissing, ExcelMissing) => new Arguments(provider, model, RenderContext(ParseCells(context)), RenderInstructions(SystemMessages.InlineInstructions), ParseTemperature(defaultTemperature)),
+            (ExcelReference context, ExcelMissing, ExcelMissing) => new Arguments(provider, model, RenderCells(ParseCells(context)), RenderInstructions(SystemMessages.InlineInstructions), ParseTemperature(defaultTemperature)),
             // "=PROMPT(A1:B2, 0.7)
-            (ExcelReference context, double temperature, ExcelMissing) => new Arguments(provider, model, RenderContext(ParseCells(context)), RenderInstructions(SystemMessages.InlineInstructions), ParseTemperature(defaultTemperature)),
+            (ExcelReference context, double temperature, ExcelMissing) => new Arguments(provider, model, RenderCells(ParseCells(context)), RenderInstructions(SystemMessages.InlineInstructions), ParseTemperature(defaultTemperature)),
             // "=PROMPT(A1:B2, "Extract keywords")
-            (ExcelReference context, string instructions, ExcelMissing) => new Arguments(provider, model, RenderContext(ParseCells(context)), RenderInstructions(instructions), ParseTemperature(defaultTemperature)),
+            (ExcelReference context, string instructions, ExcelMissing) => new Arguments(provider, model, RenderCells(ParseCells(context)), RenderInstructions(instructions), ParseTemperature(defaultTemperature)),
             // "=PROMPT(A1:B2, "Extract keywords", 0.7)
-            (ExcelReference context, string instructions, double temperature) => new Arguments(provider, model, RenderContext(ParseCells(context)), RenderInstructions(instructions), ParseTemperature(temperature)),
+            (ExcelReference context, string instructions, double temperature) => new Arguments(provider, model, RenderCells(ParseCells(context)), RenderInstructions(instructions), ParseTemperature(temperature)),
             // "=PROMPT(A1:B2, C1:D2)
-            (ExcelReference context, ExcelReference instructions, ExcelMissing) => new Arguments(provider, model, RenderContext(ParseCells(context)), RenderInstructions(ParseCells(instructions)), ParseTemperature(defaultTemperature)),
+            (ExcelReference context, ExcelReference instructions, ExcelMissing) => new Arguments(provider, model, RenderCells(ParseCells(context)), RenderInstructions(ParseCells(instructions)), ParseTemperature(defaultTemperature)),
             // "=PROMPT(A1:B2, C1:D2, 0.7)
-            (ExcelReference context, ExcelReference instructions, double temperature) => new Arguments(provider, model, RenderContext(ParseCells(context)), RenderInstructions(ParseCells(instructions)), ParseTemperature(temperature)),
+            (ExcelReference context, ExcelReference instructions, double temperature) => new Arguments(provider, model, RenderCells(ParseCells(context)), RenderInstructions(ParseCells(instructions)), ParseTemperature(temperature)),
             // Anything else
             _ => throw new ArgumentException($"Invalid arguments ({_instructionsOrContext?.GetType().Name}, {_instructionsOrTemperature?.GetType().Name}, {_temperature?.GetType().Name})")
         };
     }
@@ -110,7 +111,7 @@ public Arguments Parse()
 
     private static string GetProvider(string providerAndModel)
     {
-        var index = providerAndModel.IndexOf("/");
+        var index = providerAndModel.IndexOf('/');
 
         if (index < 0)
         {
@@ -122,7 +123,7 @@ private static string GetProvider(string providerAndModel)
 
     private static string GetModel(string providerAndModel)
     {
-        var index = providerAndModel.IndexOf("/");
+        var index = providerAndModel.IndexOf('/');
 
         if (index < 0)
         {
@@ -203,7 +204,7 @@ private static string GetRowName(int rowNumber)
     {
         return (rowNumber + 1).ToString();
     }
-    private static string RenderContext(string context)
+    private static string RenderCells(string context)
     {
         return new StringBuilder()
             .AppendLine("")
diff --git a/src/Cellm/AddIn/SystemMessages.cs b/src/Cellm/Prompts/SystemMessages.cs
similarity index 94%
rename from src/Cellm/AddIn/SystemMessages.cs
rename to src/Cellm/Prompts/SystemMessages.cs
index 971fbb3..bedcad4 100644
--- a/src/Cellm/AddIn/SystemMessages.cs
+++ b/src/Cellm/Prompts/SystemMessages.cs
@@ -1,4 +1,4 @@
-namespace Cellm.AddIn;
+namespace Cellm.Prompts;
 
 internal static class SystemMessages
 {
diff --git a/src/Cellm/Services/ServiceLocator.cs b/src/Cellm/Services/ServiceLocator.cs
index c54a52e..9ee296a 100644
--- a/src/Cellm/Services/ServiceLocator.cs
+++ b/src/Cellm/Services/ServiceLocator.cs
@@ -87,7 +87,7 @@ private static IServiceCollection ConfigureServices(IServiceCollection services)
             .AddSingleton(configuration)
             .AddMemoryCache()
             .AddMediatR(cfg => cfg.RegisterServicesFromAssembly(Assembly.GetExecutingAssembly()))
-            .AddTransient<PromptWithArgumentParser>()
+            .AddTransient<PromptArgumentParser>()
             .AddSingleton()
             .AddSingleton();
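
As an aside for readers of PATCH 2/2: the renamed PromptArgumentParser keeps the fluent, type-switched handling of PROMPT's optional arguments that the diff above shows in Parse(). The sketch below is a minimal, self-contained illustration of that pattern only; every name in it (ParsedArgs, ArgumentSketch) is a hypothetical stand-in rather than Cellm's actual API, and plain arrays and nulls stand in for ExcelDna's ExcelReference and ExcelMissing types.

```csharp
using System;

// Hypothetical stand-ins for illustration only; Cellm's real parser works with
// ExcelReference/ExcelMissing from ExcelDna and renders ranges via RenderCells.
public record ParsedArgs(string Context, string Instructions, double Temperature);

public sealed class ArgumentSketch
{
    private object? _instructionsOrContext;
    private object? _instructionsOrTemperature;
    private object? _temperature;

    private const double DefaultTemperature = 0.0;

    public ArgumentSketch AddInstructionsOrContext(object? value) { _instructionsOrContext = value; return this; }
    public ArgumentSketch AddInstructionsOrTemperature(object? value) { _instructionsOrTemperature = value; return this; }
    public ArgumentSketch AddTemperature(object? value) { _temperature = value; return this; }

    // Positional arguments are disambiguated by their runtime type, mirroring Parse() in the diff above.
    public ParsedArgs Parse() => (_instructionsOrContext, _instructionsOrTemperature, _temperature) switch
    {
        // "=PROMPT("Extract keywords")
        (string instructions, null, null) => new ParsedArgs(string.Empty, instructions, DefaultTemperature),
        // "=PROMPT("Extract keywords", 0.7)
        (string instructions, double temperature, null) => new ParsedArgs(string.Empty, instructions, temperature),
        // "=PROMPT(A1:B2, "Extract keywords", 0.7) -- a string[] stands in for the cell range here
        (string[] cells, string instructions, double temperature) => new ParsedArgs(string.Join("\n", cells), instructions, temperature),
        _ => throw new ArgumentException("Invalid argument combination")
    };

    public static void Main()
    {
        var args = new ArgumentSketch()
            .AddInstructionsOrContext(new[] { "Alpha", "Beta" })
            .AddInstructionsOrTemperature("Extract keywords")
            .AddTemperature(0.7)
            .Parse();

        // Prints: Extract keywords | Alpha, Beta | 0.7
        Console.WriteLine($"{args.Instructions} | {args.Context.Replace("\n", ", ")} | {args.Temperature}");
    }
}
```

In the add-in itself, cell ranges are resolved via ParseCells and rendered with RenderCells, and the resulting Arguments record feeds the PromptBuilder chain shown in CellmFunctions.PromptWith.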