diff --git a/core/src/main/java/com/devoxx/genie/model/LanguageModel.java b/core/src/main/java/com/devoxx/genie/model/LanguageModel.java
index 2f0da97e..75125c6f 100644
--- a/core/src/main/java/com/devoxx/genie/model/LanguageModel.java
+++ b/core/src/main/java/com/devoxx/genie/model/LanguageModel.java
@@ -20,7 +20,7 @@ public class LanguageModel implements Comparable {
     private int contextWindow;

     public LanguageModel() {
-        this(ModelProvider.OPENAI, "", "", false, 0.0, 0.0, 0);
+        this(ModelProvider.OpenAI, "", "", false, 0.0, 0.0, 0);
     }

     public LanguageModel(ModelProvider provider,
diff --git a/core/src/main/java/com/devoxx/genie/model/enumarations/ModelProvider.java b/core/src/main/java/com/devoxx/genie/model/enumarations/ModelProvider.java
index 782e5786..2732df44 100644
--- a/core/src/main/java/com/devoxx/genie/model/enumarations/ModelProvider.java
+++ b/core/src/main/java/com/devoxx/genie/model/enumarations/ModelProvider.java
@@ -8,28 +8,28 @@
 @Getter
 public enum ModelProvider {
-    OPENAI("OpenAI", Type.CLOUD),
-    ANTHROPIC("Anthropic", Type.CLOUD),
-    MISTRAL("Mistral", Type.CLOUD),
-    GROQ("Groq", Type.CLOUD),
-    DEEP_INFRA("DeepInfra", Type.CLOUD),
-    GOOGLE("Google", Type.CLOUD),
-    LLAMA("LLaMA.c++", Type.LOCAL),
-    OPEN_ROUTER("OpenRouter", Type.CLOUD),
-    DEEP_SEEK("DeepSeek", Type.CLOUD),
-    AZURE_OPEN_AI("AzureOpenAI", Type.CLOUD),
-    OLLAMA("Ollama", Type.LOCAL),
-    LMSTUDIO("LMStudio", Type.LOCAL),
-    GPT_4_ALL("GPT4All", Type.LOCAL),
-    JAN("Jan", Type.LOCAL),
-    JLAMA("Jlama (Experimental /w REST API)", Type.LOCAL),
-    EXO("Exo (Experimental)", Type.LOCAL),
-    CUSTOM_OPEN_AI("CustomOpenAI", Type.OPTIONAL);
+    Ollama("Ollama", Type.LOCAL),
+    LMStudio("LMStudio", Type.LOCAL),
+    GPT4All("GPT4All", Type.LOCAL),
+    Jan("Jan", Type.LOCAL),
+    OpenAI("OpenAI", Type.CLOUD),
+    Anthropic("Anthropic", Type.CLOUD),
+    Mistral("Mistral", Type.CLOUD),
+    Groq("Groq", Type.CLOUD),
+    DeepInfra("DeepInfra", Type.CLOUD),
+    Google("Google", Type.CLOUD),
+    Exo("Exo (Experimental)", Type.LOCAL),
+    LLaMA("LLaMA.c++", Type.LOCAL),
+    OpenRouter("OpenRouter", Type.CLOUD),
+    DeepSeek("DeepSeek", Type.CLOUD),
+    Jlama("Jlama (Experimental /w REST API)", Type.LOCAL),
+    AzureOpenAI("AzureOpenAI", Type.OPTIONAL),
+    CustomOpenAI("CustomOpenAI", Type.OPTIONAL);

     public enum Type {
         LOCAL, // Local Providers
         CLOUD, // Cloud Providers
-        OPTIONAL // Optional Providers (Need to be enabled from settings, due to inconvenient setup)
+        OPTIONAL // Optional Providers(Need to be enabled from settings, due to inconvenient setup)
     }

     private final String name;
diff --git a/src/main/java/com/devoxx/genie/chatmodel/ChatModelFactoryProvider.java b/src/main/java/com/devoxx/genie/chatmodel/ChatModelFactoryProvider.java
index d3c37dc5..b3b7b362 100644
--- a/src/main/java/com/devoxx/genie/chatmodel/ChatModelFactoryProvider.java
+++ b/src/main/java/com/devoxx/genie/chatmodel/ChatModelFactoryProvider.java
@@ -23,6 +23,10 @@ public class ChatModelFactoryProvider {

+    private ChatModelFactoryProvider() {
+        throw new IllegalStateException("Utility class");
+    }
+
     private static final Map factoryCache = new ConcurrentHashMap<>();

     public static @NotNull Optional getFactoryByProvider(@NotNull String modelProvider) {
diff --git a/src/main/java/com/devoxx/genie/chatmodel/ChatModelProvider.java b/src/main/java/com/devoxx/genie/chatmodel/ChatModelProvider.java
index 83396fcf..4663d2e7 100644
--- a/src/main/java/com/devoxx/genie/chatmodel/ChatModelProvider.java
+++ b/src/main/java/com/devoxx/genie/chatmodel/ChatModelProvider.java
@@ -20,7 +20,7 @@
 @Setter
 public
class ChatModelProvider { - private static final ModelProvider DEFAULT_PROVIDER = ModelProvider.OPENAI; // Choose an appropriate default + private static final ModelProvider DEFAULT_PROVIDER = ModelProvider.OpenAI; // Choose an appropriate default public ChatLanguageModel getChatLanguageModel(@NotNull ChatMessageContext chatMessageContext) { ChatModel chatModel = initChatModel(chatMessageContext); @@ -72,22 +72,22 @@ private void setLocalBaseUrl(@NotNull LanguageModel languageModel, DevoxxGenieSettingsService stateService) { // Set base URL for local providers switch (languageModel.getProvider()) { - case LMSTUDIO: + case LMStudio: chatModel.setBaseUrl(stateService.getLmstudioModelUrl()); break; - case OLLAMA: + case Ollama: chatModel.setBaseUrl(stateService.getOllamaModelUrl()); break; - case GPT_4_ALL: + case GPT4All: chatModel.setBaseUrl(stateService.getGpt4allModelUrl()); break; - case EXO: + case Exo: chatModel.setBaseUrl(stateService.getExoModelUrl()); break; - case LLAMA: + case LLaMA: chatModel.setBaseUrl(stateService.getLlamaCPPUrl()); break; - case JLAMA: + case Jlama: chatModel.setBaseUrl(stateService.getJlamaUrl()); break; // Add other local providers as needed diff --git a/src/main/java/com/devoxx/genie/chatmodel/anthropic/AnthropicChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/anthropic/AnthropicChatModelFactory.java index d25edebc..8798729a 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/anthropic/AnthropicChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/anthropic/AnthropicChatModelFactory.java @@ -45,6 +45,6 @@ public String getApiKey() { @Override public List getModels() { - return getModels(ModelProvider.ANTHROPIC); + return getModels(ModelProvider.Anthropic); } } diff --git a/src/main/java/com/devoxx/genie/chatmodel/azureopenai/AzureOpenAIChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/azureopenai/AzureOpenAIChatModelFactory.java index 17b892cf..a88cbc74 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/azureopenai/AzureOpenAIChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/azureopenai/AzureOpenAIChatModelFactory.java @@ -59,7 +59,7 @@ public String getApiKey() { @Override public List getModels() { return List.of(LanguageModel.builder() - .provider(ModelProvider.AZURE_OPEN_AI) + .provider(ModelProvider.AzureOpenAI) .modelName(DevoxxGenieStateService.getInstance().getAzureOpenAIDeployment()) .displayName(DevoxxGenieStateService.getInstance().getAzureOpenAIDeployment()) .inputCost(0.0) diff --git a/src/main/java/com/devoxx/genie/chatmodel/deepinfra/DeepInfraChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/deepinfra/DeepInfraChatModelFactory.java index 2b161d6a..f8fbbdad 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/deepinfra/DeepInfraChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/deepinfra/DeepInfraChatModelFactory.java @@ -49,6 +49,6 @@ public String getApiKey() { @Override public List getModels() { - return getModels(ModelProvider.DEEP_INFRA); + return getModels(ModelProvider.DeepInfra); } } diff --git a/src/main/java/com/devoxx/genie/chatmodel/deepseek/DeepSeekChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/deepseek/DeepSeekChatModelFactory.java index a6608ddc..f139a277 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/deepseek/DeepSeekChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/deepseek/DeepSeekChatModelFactory.java @@ -50,6 +50,6 @@ public String getApiKey() { @Override public List getModels() 
{ - return getModels(ModelProvider.DEEP_SEEK); + return getModels(ModelProvider.DeepSeek); } } diff --git a/src/main/java/com/devoxx/genie/chatmodel/exo/ExoChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/exo/ExoChatModelFactory.java index 0d08060e..ca6e825b 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/exo/ExoChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/exo/ExoChatModelFactory.java @@ -55,7 +55,7 @@ public List getModels() { .modelName("llama-3.1-405b") .displayName("Llama 3.1 405B") .apiKeyUsed(false) - .provider(ModelProvider.EXO) + .provider(ModelProvider.Exo) .outputCost(0) .inputCost(0) .contextWindow(131_000) @@ -65,7 +65,7 @@ public List getModels() { .modelName("llama-3.1-8b") .displayName("Llama 3.1 8B") .apiKeyUsed(false) - .provider(ModelProvider.EXO) + .provider(ModelProvider.Exo) .outputCost(0) .inputCost(0) .contextWindow(8_000) @@ -75,7 +75,7 @@ public List getModels() { .modelName("llama-3.1-70b") .displayName("Llama 3.1 70B") .apiKeyUsed(false) - .provider(ModelProvider.EXO) + .provider(ModelProvider.Exo) .outputCost(0) .inputCost(0) .contextWindow(131_000) @@ -85,7 +85,7 @@ public List getModels() { .modelName("llama-3-8b") .displayName("Llama 3 8B") .apiKeyUsed(false) - .provider(ModelProvider.EXO) + .provider(ModelProvider.Exo) .outputCost(0) .inputCost(0) .contextWindow(8_000) @@ -96,7 +96,7 @@ public List getModels() { .modelName("mistral-nemo") .displayName("Mistral Nemo") .apiKeyUsed(false) - .provider(ModelProvider.EXO) + .provider(ModelProvider.Exo) .outputCost(0) .inputCost(0) .contextWindow(8_000) @@ -107,7 +107,7 @@ public List getModels() { .modelName("mistral-large") .displayName("Mistral Large") .apiKeyUsed(false) - .provider(ModelProvider.EXO) + .provider(ModelProvider.Exo) .outputCost(0) .inputCost(0) .contextWindow(8_000) @@ -118,7 +118,7 @@ public List getModels() { .modelName("deepseek-coder-v2-lite") .displayName("Deepseek Coder V2 Lite") .apiKeyUsed(false) - .provider(ModelProvider.EXO) + .provider(ModelProvider.Exo) .outputCost(0) .inputCost(0) .contextWindow(8_000) @@ -129,7 +129,7 @@ public List getModels() { .modelName("llava-1.5-7b-hf") .displayName("Llava 1.5 7B HF") .apiKeyUsed(false) - .provider(ModelProvider.EXO) + .provider(ModelProvider.Exo) .outputCost(0) .inputCost(0) .contextWindow(8_000) diff --git a/src/main/java/com/devoxx/genie/chatmodel/google/GoogleChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/google/GoogleChatModelFactory.java index 10721be6..cbcbf249 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/google/GoogleChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/google/GoogleChatModelFactory.java @@ -30,6 +30,6 @@ public String getApiKey() { @Override public List getModels() { - return getModels(ModelProvider.GOOGLE); + return getModels(ModelProvider.Google); } } diff --git a/src/main/java/com/devoxx/genie/chatmodel/gpt4all/GPT4AllChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/gpt4all/GPT4AllChatModelFactory.java index 607a07ee..3e7c8c57 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/gpt4all/GPT4AllChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/gpt4all/GPT4AllChatModelFactory.java @@ -30,6 +30,7 @@ public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) { .build(); } + @Override public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel chatModel) { return LocalAiStreamingChatModel.builder() .baseUrl(DevoxxGenieStateService.getInstance().getGpt4allModelUrl()) @@ 
-43,7 +44,7 @@ public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel ch @Override public List getModels() { LanguageModel lmStudio = LanguageModel.builder() - .provider(ModelProvider.GPT_4_ALL) + .provider(ModelProvider.GPT4All) .modelName("GPT4All") .inputCost(0) .outputCost(0) diff --git a/src/main/java/com/devoxx/genie/chatmodel/groq/GroqChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/groq/GroqChatModelFactory.java index 35788b31..8e753ce3 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/groq/GroqChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/groq/GroqChatModelFactory.java @@ -47,6 +47,6 @@ public String getApiKey() { @Override public List getModels() { - return getModels(ModelProvider.GROQ); + return getModels(ModelProvider.Groq); } } diff --git a/src/main/java/com/devoxx/genie/chatmodel/jan/JanChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/jan/JanChatModelFactory.java index 35b25d34..9eca3b55 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/jan/JanChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/jan/JanChatModelFactory.java @@ -71,7 +71,7 @@ public List getModels() { for (Data model : models) { CompletableFuture future = CompletableFuture.runAsync(() -> { LanguageModel languageModel = LanguageModel.builder() - .provider(ModelProvider.JAN) + .provider(ModelProvider.Jan) .modelName(model.getId()) .displayName(model.getName()) .inputCost(0) diff --git a/src/main/java/com/devoxx/genie/chatmodel/lmstudio/LMStudioChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/lmstudio/LMStudioChatModelFactory.java index 20427d8c..252931fe 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/lmstudio/LMStudioChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/lmstudio/LMStudioChatModelFactory.java @@ -74,7 +74,7 @@ public List getModels() { for (LMStudioModelEntryDTO model : lmStudioModels) { CompletableFuture future = CompletableFuture.runAsync(() -> { LanguageModel languageModel = LanguageModel.builder() - .provider(ModelProvider.LMSTUDIO) + .provider(ModelProvider.LMStudio) .modelName(model.getId()) .displayName(model.getId()) .inputCost(0) diff --git a/src/main/java/com/devoxx/genie/chatmodel/mistral/MistralChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/mistral/MistralChatModelFactory.java index 1ec4dc8e..11a33860 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/mistral/MistralChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/mistral/MistralChatModelFactory.java @@ -47,6 +47,6 @@ public String getApiKey() { @Override public List getModels() { - return getModels(ModelProvider.MISTRAL); + return getModels(ModelProvider.Mistral); } } diff --git a/src/main/java/com/devoxx/genie/chatmodel/ollama/OllamaChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/ollama/OllamaChatModelFactory.java index 0f799ed2..03e803bb 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/ollama/OllamaChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/ollama/OllamaChatModelFactory.java @@ -75,7 +75,7 @@ public List getModels() { try { int contextWindow = OllamaApiService.getModelContext(model.getName()); LanguageModel languageModel = LanguageModel.builder() - .provider(ModelProvider.OLLAMA) + .provider(ModelProvider.Ollama) .modelName(model.getName()) .displayName(model.getName()) .inputCost(0) diff --git a/src/main/java/com/devoxx/genie/chatmodel/openai/OpenAIChatModelFactory.java 
b/src/main/java/com/devoxx/genie/chatmodel/openai/OpenAIChatModelFactory.java index 0de6f785..6c7c4ffa 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/openai/OpenAIChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/openai/OpenAIChatModelFactory.java @@ -59,6 +59,6 @@ public String getApiKey() { @Override public List getModels() { - return getModels(ModelProvider.OPENAI); + return getModels(ModelProvider.OpenAI); } } diff --git a/src/main/java/com/devoxx/genie/chatmodel/openrouter/OpenRouterChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/openrouter/OpenRouterChatModelFactory.java index 1a7edc61..67015b3b 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/openrouter/OpenRouterChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/openrouter/OpenRouterChatModelFactory.java @@ -86,7 +86,7 @@ public List getModels() { double outputCost = convertAndScalePrice(model.getPricing().getCompletion()); LanguageModel languageModel = LanguageModel.builder() - .provider(ModelProvider.OPEN_ROUTER) + .provider(ModelProvider.OpenRouter) .modelName(model.getId()) .displayName(model.getName()) .inputCost(inputCost) diff --git a/src/main/java/com/devoxx/genie/controller/ActionPanelController.java b/src/main/java/com/devoxx/genie/controller/ActionPanelController.java index 368b1e5c..bf0965d5 100644 --- a/src/main/java/com/devoxx/genie/controller/ActionPanelController.java +++ b/src/main/java/com/devoxx/genie/controller/ActionPanelController.java @@ -155,10 +155,10 @@ private boolean validateAndPreparePrompt(String actionCommand, private LanguageModel createDefaultLanguageModel(@NotNull DevoxxGenieSettingsService stateService) { ModelProvider selectedProvider = (ModelProvider) modelProviderComboBox.getSelectedItem(); if (selectedProvider != null && - (selectedProvider.equals(ModelProvider.LMSTUDIO) || - selectedProvider.equals(ModelProvider.GPT_4_ALL) || - selectedProvider.equals(ModelProvider.JLAMA) || - selectedProvider.equals(ModelProvider.LLAMA))) { + (selectedProvider.equals(ModelProvider.LMStudio) || + selectedProvider.equals(ModelProvider.GPT4All) || + selectedProvider.equals(ModelProvider.Jlama) || + selectedProvider.equals(ModelProvider.LLaMA))) { return LanguageModel.builder() .provider(selectedProvider) .apiKeyUsed(false) @@ -169,7 +169,7 @@ private LanguageModel createDefaultLanguageModel(@NotNull DevoxxGenieSettingsSer } else { String modelName = stateService.getSelectedLanguageModel(project.getLocationHash()); return LanguageModel.builder() - .provider(selectedProvider != null ? selectedProvider : ModelProvider.OPENAI) + .provider(selectedProvider != null ? 
selectedProvider : ModelProvider.OpenAI) .modelName(modelName) .apiKeyUsed(false) .inputCost(0) diff --git a/src/main/java/com/devoxx/genie/service/LLMModelRegistryService.java b/src/main/java/com/devoxx/genie/service/LLMModelRegistryService.java index 6ebbd77a..cfb6a3ce 100644 --- a/src/main/java/com/devoxx/genie/service/LLMModelRegistryService.java +++ b/src/main/java/com/devoxx/genie/service/LLMModelRegistryService.java @@ -39,309 +39,309 @@ public LLMModelRegistryService() { private void addAnthropicModels() { String claude2 = CLAUDE_2.toString(); - models.put(ModelProvider.ANTHROPIC.getName() + "-" + claude2, - LanguageModel.builder() - .provider(ModelProvider.ANTHROPIC) - .modelName(claude2) - .displayName("Claude 2.0") - .inputCost(8) - .outputCost(24) - .contextWindow(100_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Anthropic.getName() + "-" + claude2, + LanguageModel.builder() + .provider(ModelProvider.Anthropic) + .modelName(claude2) + .displayName("Claude 2.0") + .inputCost(8) + .outputCost(24) + .contextWindow(100_000) + .apiKeyUsed(true) + .build()); String claude21 = CLAUDE_2_1.toString(); - models.put(ModelProvider.ANTHROPIC.getName() + "-" + claude21, - LanguageModel.builder() - .provider(ModelProvider.ANTHROPIC) - .modelName(claude21) - .displayName("Claude 2.1") - .inputCost(8) - .outputCost(24) - .contextWindow(200_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Anthropic.getName() + "-" + claude21, + LanguageModel.builder() + .provider(ModelProvider.Anthropic) + .modelName(claude21) + .displayName("Claude 2.1") + .inputCost(8) + .outputCost(24) + .contextWindow(200_000) + .apiKeyUsed(true) + .build()); String claudeHaiku3 = CLAUDE_3_HAIKU_20240307.toString(); - models.put(ModelProvider.ANTHROPIC.getName() + "-" + claudeHaiku3, - LanguageModel.builder() - .provider(ModelProvider.ANTHROPIC) - .modelName(claudeHaiku3) - .displayName("Claude 3 Haiku") - .inputCost(0.25) - .outputCost(1.25) - .contextWindow(200_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Anthropic.getName() + "-" + claudeHaiku3, + LanguageModel.builder() + .provider(ModelProvider.Anthropic) + .modelName(claudeHaiku3) + .displayName("Claude 3 Haiku") + .inputCost(0.25) + .outputCost(1.25) + .contextWindow(200_000) + .apiKeyUsed(true) + .build()); String claudeSonnet3 = CLAUDE_3_SONNET_20240229.toString(); - models.put(ModelProvider.ANTHROPIC.getName() + "-" + claudeSonnet3, - LanguageModel.builder() - .provider(ModelProvider.ANTHROPIC) - .modelName(claudeSonnet3) - .displayName("Claude 3 Sonnet") - .inputCost(3) - .outputCost(15) - .contextWindow(200_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Anthropic.getName() + "-" + claudeSonnet3, + LanguageModel.builder() + .provider(ModelProvider.Anthropic) + .modelName(claudeSonnet3) + .displayName("Claude 3 Sonnet") + .inputCost(3) + .outputCost(15) + .contextWindow(200_000) + .apiKeyUsed(true) + .build()); String claudeOpus3 = CLAUDE_3_OPUS_20240229.toString(); - models.put(ModelProvider.ANTHROPIC.getName() + "-" + claudeOpus3, - LanguageModel.builder() - .provider(ModelProvider.ANTHROPIC) - .modelName(claudeOpus3) - .displayName("Claude 3 Opus") - .inputCost(15) - .outputCost(75) - .contextWindow(200_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Anthropic.getName() + "-" + claudeOpus3, + LanguageModel.builder() + .provider(ModelProvider.Anthropic) + .modelName(claudeOpus3) + .displayName("Claude 3 Opus") + .inputCost(15) + .outputCost(75) + .contextWindow(200_000) + 
.apiKeyUsed(true) + .build()); String claudeSonnet35 = CLAUDE_3_5_SONNET_20241022.toString(); - models.put(ModelProvider.ANTHROPIC.getName() + "-" + claudeSonnet35, - LanguageModel.builder() - .provider(ModelProvider.ANTHROPIC) - .modelName(claudeSonnet35) - .displayName("Claude 3.5 Sonnet") - .inputCost(3) - .outputCost(15) - .contextWindow(200_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Anthropic.getName() + "-" + claudeSonnet35, + LanguageModel.builder() + .provider(ModelProvider.Anthropic) + .modelName(claudeSonnet35) + .displayName("Claude 3.5 Sonnet") + .inputCost(3) + .outputCost(15) + .contextWindow(200_000) + .apiKeyUsed(true) + .build()); String claudeHaiku35 = CLAUDE_3_5_HAIKU_20241022.toString(); - models.put(ModelProvider.ANTHROPIC.getName() + "-" + claudeHaiku35, - LanguageModel.builder() - .provider(ModelProvider.ANTHROPIC) - .modelName(claudeHaiku35) - .displayName("Claude 3.5 Haiku") - .inputCost(1) - .outputCost(5) - .contextWindow(200_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Anthropic.getName() + "-" + claudeHaiku35, + LanguageModel.builder() + .provider(ModelProvider.Anthropic) + .modelName(claudeHaiku35) + .displayName("Claude 3.5 Haiku") + .inputCost(1) + .outputCost(5) + .contextWindow(200_000) + .apiKeyUsed(true) + .build()); } private void addOpenAiModels() { String o1Mini = "o1-mini"; - models.put(ModelProvider.OPENAI.getName() + ":" + o1Mini, - LanguageModel.builder() - .provider(ModelProvider.OPENAI) - .modelName(o1Mini) - .displayName("o1 mini") - .inputCost(5) - .outputCost(15) - .contextWindow(128_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.OpenAI.getName() + ":" + o1Mini, + LanguageModel.builder() + .provider(ModelProvider.OpenAI) + .modelName(o1Mini) + .displayName("o1 mini") + .inputCost(5) + .outputCost(15) + .contextWindow(128_000) + .apiKeyUsed(true) + .build()); String o1Preview = "o1-preview"; - models.put(ModelProvider.OPENAI.getName() + ":" + o1Preview, - LanguageModel.builder() - .provider(ModelProvider.OPENAI) - .modelName(o1Preview) - .displayName("o1 preview") - .inputCost(10) - .outputCost(30) - .contextWindow(128_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.OpenAI.getName() + ":" + o1Preview, + LanguageModel.builder() + .provider(ModelProvider.OpenAI) + .modelName(o1Preview) + .displayName("o1 preview") + .inputCost(10) + .outputCost(30) + .contextWindow(128_000) + .apiKeyUsed(true) + .build()); String gpt35Turbo = GPT_3_5_TURBO.toString(); - models.put(ModelProvider.OPENAI.getName() + ":" + gpt35Turbo, - LanguageModel.builder() - .provider(ModelProvider.OPENAI) - .modelName(gpt35Turbo) - .displayName("GPT 3.5 Turbo") - .inputCost(0.5) - .outputCost(1.5) - .contextWindow(16_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.OpenAI.getName() + ":" + gpt35Turbo, + LanguageModel.builder() + .provider(ModelProvider.OpenAI) + .modelName(gpt35Turbo) + .displayName("GPT 3.5 Turbo") + .inputCost(0.5) + .outputCost(1.5) + .contextWindow(16_000) + .apiKeyUsed(true) + .build()); String gpt4 = GPT_4.toString(); - models.put(ModelProvider.OPENAI.getName() + ":" + gpt4, - LanguageModel.builder() - .provider(ModelProvider.OPENAI) - .modelName(gpt4) - .displayName("GPT 4") - .inputCost(30) - .outputCost(60) - .contextWindow(8_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.OpenAI.getName() + ":" + gpt4, + LanguageModel.builder() + .provider(ModelProvider.OpenAI) + .modelName(gpt4) + .displayName("GPT 4") + .inputCost(30) + .outputCost(60) + 
.contextWindow(8_000) + .apiKeyUsed(true) + .build()); String gpt4TurboPreview = GPT_4_TURBO_PREVIEW.toString(); - models.put(ModelProvider.OPENAI.getName() + ":" + gpt4TurboPreview, - LanguageModel.builder() - .provider(ModelProvider.OPENAI) - .modelName(gpt4TurboPreview) - .displayName("GPT 4 Turbo") - .inputCost(10) - .outputCost(30) - .contextWindow(128_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.OpenAI.getName() + ":" + gpt4TurboPreview, + LanguageModel.builder() + .provider(ModelProvider.OpenAI) + .modelName(gpt4TurboPreview) + .displayName("GPT 4 Turbo") + .inputCost(10) + .outputCost(30) + .contextWindow(128_000) + .apiKeyUsed(true) + .build()); String gpt4o = GPT_4_O.toString(); - models.put(ModelProvider.OPENAI.getName() + ":" + gpt4o, - LanguageModel.builder() - .provider(ModelProvider.OPENAI) - .modelName(gpt4o) - .displayName("GPT 4o") - .inputCost(5) - .outputCost(15) - .contextWindow(128_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.OpenAI.getName() + ":" + gpt4o, + LanguageModel.builder() + .provider(ModelProvider.OpenAI) + .modelName(gpt4o) + .displayName("GPT 4o") + .inputCost(5) + .outputCost(15) + .contextWindow(128_000) + .apiKeyUsed(true) + .build()); String gpt4oMini = GPT_4_O_MINI.toString(); - models.put(ModelProvider.OPENAI.getName() + ":" + gpt4oMini, - LanguageModel.builder() - .provider(ModelProvider.OPENAI) - .modelName(gpt4oMini) - .displayName("GPT 4o mini") - .inputCost(0.15) - .outputCost(0.6) - .contextWindow(128_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.OpenAI.getName() + ":" + gpt4oMini, + LanguageModel.builder() + .provider(ModelProvider.OpenAI) + .modelName(gpt4oMini) + .displayName("GPT 4o mini") + .inputCost(0.15) + .outputCost(0.6) + .contextWindow(128_000) + .apiKeyUsed(true) + .build()); } private void addDeepInfraModels() { String metaLlama31Instruct405B = "meta-llama/Meta-Llama-3.1-405B-Instruct"; - models.put(ModelProvider.DEEP_INFRA.getName() + ":" + metaLlama31Instruct405B, - LanguageModel.builder() - .provider(ModelProvider.DEEP_INFRA) - .modelName(metaLlama31Instruct405B) - .displayName("Meta Llama 3.1 405B") - .inputCost(2.7) - .outputCost(2.7) - .contextWindow(32_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.DeepInfra.getName() + ":" + metaLlama31Instruct405B, + LanguageModel.builder() + .provider(ModelProvider.DeepInfra) + .modelName(metaLlama31Instruct405B) + .displayName("Meta Llama 3.1 405B") + .inputCost(2.7) + .outputCost(2.7) + .contextWindow(32_000) + .apiKeyUsed(true) + .build()); String metaLlama31Instruct70B = "meta-llama/Meta-Llama-3.1-70B-Instruct"; - models.put(ModelProvider.DEEP_INFRA.getName() + ":" + metaLlama31Instruct70B, - LanguageModel.builder() - .provider(ModelProvider.DEEP_INFRA) - .modelName(metaLlama31Instruct70B) - .displayName("Meta Llama 3.1 70B") - .inputCost(0.35) - .outputCost(0.4) - .contextWindow(128_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.DeepInfra.getName() + ":" + metaLlama31Instruct70B, + LanguageModel.builder() + .provider(ModelProvider.DeepInfra) + .modelName(metaLlama31Instruct70B) + .displayName("Meta Llama 3.1 70B") + .inputCost(0.35) + .outputCost(0.4) + .contextWindow(128_000) + .apiKeyUsed(true) + .build()); String metaLlama31Instruct8B = "meta-llama/Meta-Llama-3.1-8B-Instruct"; - models.put(ModelProvider.DEEP_INFRA.getName() + ":" + metaLlama31Instruct8B, - LanguageModel.builder() - .provider(ModelProvider.DEEP_INFRA) - .modelName(metaLlama31Instruct8B) - .displayName("Meta Llama 3.1 
8B") - .inputCost(0.055) - .outputCost(0.055) - .contextWindow(128_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.DeepInfra.getName() + ":" + metaLlama31Instruct8B, + LanguageModel.builder() + .provider(ModelProvider.DeepInfra) + .modelName(metaLlama31Instruct8B) + .displayName("Meta Llama 3.1 8B") + .inputCost(0.055) + .outputCost(0.055) + .contextWindow(128_000) + .apiKeyUsed(true) + .build()); String mistralNemoInstruct2407 = "mistralai/Mistral-Nemo-Instruct-2407"; - models.put(ModelProvider.DEEP_INFRA.getName() + ":" + mistralNemoInstruct2407, - LanguageModel.builder() - .provider(ModelProvider.DEEP_INFRA) - .modelName(mistralNemoInstruct2407) - .displayName("Mistral Nemo 12B") - .inputCost(0.13) - .outputCost(0.13) - .contextWindow(128_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.DeepInfra.getName() + ":" + mistralNemoInstruct2407, + LanguageModel.builder() + .provider(ModelProvider.DeepInfra) + .modelName(mistralNemoInstruct2407) + .displayName("Mistral Nemo 12B") + .inputCost(0.13) + .outputCost(0.13) + .contextWindow(128_000) + .apiKeyUsed(true) + .build()); String mistralMixtral8x7BInstruct = "mistralai/Mixtral-8x7B-Instruct-v0.1"; - models.put(ModelProvider.DEEP_INFRA.getName() + ":" + mistralMixtral8x7BInstruct, - LanguageModel.builder() - .provider(ModelProvider.DEEP_INFRA) - .modelName(mistralMixtral8x7BInstruct) - .displayName("Mixtral 8x7B Instruct v0.1") - .inputCost(0.24) - .outputCost(0.24) - .contextWindow(32_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.DeepInfra.getName() + ":" + mistralMixtral8x7BInstruct, + LanguageModel.builder() + .provider(ModelProvider.DeepInfra) + .modelName(mistralMixtral8x7BInstruct) + .displayName("Mixtral 8x7B Instruct v0.1") + .inputCost(0.24) + .outputCost(0.24) + .contextWindow(32_000) + .apiKeyUsed(true) + .build()); String mistralMixtral8x22BInstruct = "mistralai/Mixtral-8x22B-Instruct-v0.1"; - models.put(ModelProvider.DEEP_INFRA.getName() + ":" + mistralMixtral8x22BInstruct, - LanguageModel.builder() - .provider(ModelProvider.DEEP_INFRA) - .modelName(mistralMixtral8x22BInstruct) - .displayName("Mixtral 8x22B Instruct v0.1") - .inputCost(0.65) - .outputCost(0.65) - .contextWindow(64_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.DeepInfra.getName() + ":" + mistralMixtral8x22BInstruct, + LanguageModel.builder() + .provider(ModelProvider.DeepInfra) + .modelName(mistralMixtral8x22BInstruct) + .displayName("Mixtral 8x22B Instruct v0.1") + .inputCost(0.65) + .outputCost(0.65) + .contextWindow(64_000) + .apiKeyUsed(true) + .build()); String mistralMistral7BInstruct = "mistralai/Mistral-7B-Instruct-v0.3"; - models.put(ModelProvider.DEEP_INFRA.getName() + ":" + mistralMistral7BInstruct, - LanguageModel.builder() - .provider(ModelProvider.DEEP_INFRA) - .modelName(mistralMistral7BInstruct) - .displayName("Mistral 7B Instruct v0.3") - .inputCost(0.07) - .outputCost(0.07) - .contextWindow(32_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.DeepInfra.getName() + ":" + mistralMistral7BInstruct, + LanguageModel.builder() + .provider(ModelProvider.DeepInfra) + .modelName(mistralMistral7BInstruct) + .displayName("Mistral 7B Instruct v0.3") + .inputCost(0.07) + .outputCost(0.07) + .contextWindow(32_000) + .apiKeyUsed(true) + .build()); String microsoftWizardLM8x22B = "microsoft/WizardLM-2-8x22B"; - models.put(ModelProvider.DEEP_INFRA.getName() + ":" + microsoftWizardLM8x22B, - LanguageModel.builder() - .provider(ModelProvider.DEEP_INFRA) - 
.modelName(microsoftWizardLM8x22B) - .displayName("Wizard LM 2 8x22B") - .inputCost(0.5) - .outputCost(0.5) - .contextWindow(64_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.DeepInfra.getName() + ":" + microsoftWizardLM8x22B, + LanguageModel.builder() + .provider(ModelProvider.DeepInfra) + .modelName(microsoftWizardLM8x22B) + .displayName("Wizard LM 2 8x22B") + .inputCost(0.5) + .outputCost(0.5) + .contextWindow(64_000) + .apiKeyUsed(true) + .build()); String microsoftWizardLM7B = "microsoft/WizardLM-2-7B"; - models.put(ModelProvider.DEEP_INFRA.getName() + ":" + microsoftWizardLM7B, - LanguageModel.builder() - .provider(ModelProvider.DEEP_INFRA) - .modelName(microsoftWizardLM7B) - .displayName("Wizard LM 2 7B") - .inputCost(0.055) - .outputCost(0.055) - .contextWindow(32_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.DeepInfra.getName() + ":" + microsoftWizardLM7B, + LanguageModel.builder() + .provider(ModelProvider.DeepInfra) + .modelName(microsoftWizardLM7B) + .displayName("Wizard LM 2 7B") + .inputCost(0.055) + .outputCost(0.055) + .contextWindow(32_000) + .apiKeyUsed(true) + .build()); String openchat35 = "openchat/openchat_3.5"; - models.put(ModelProvider.DEEP_INFRA.getName() + ":" + openchat35, - LanguageModel.builder() - .provider(ModelProvider.DEEP_INFRA) - .modelName(openchat35) - .displayName("OpenChat 3.5") - .inputCost(0.055) - .outputCost(0.055) - .contextWindow(8_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.DeepInfra.getName() + ":" + openchat35, + LanguageModel.builder() + .provider(ModelProvider.DeepInfra) + .modelName(openchat35) + .displayName("OpenChat 3.5") + .inputCost(0.055) + .outputCost(0.055) + .contextWindow(8_000) + .apiKeyUsed(true) + .build()); String googleGemma9b = "google/gemma-2-9b-it"; - models.put(ModelProvider.DEEP_INFRA.getName() + ":" + googleGemma9b, - LanguageModel.builder() - .provider(ModelProvider.DEEP_INFRA) - .modelName(googleGemma9b) - .displayName("Gemma 2 9B it") - .inputCost(0.06) - .outputCost(0.06) - .contextWindow(4_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.DeepInfra.getName() + ":" + googleGemma9b, + LanguageModel.builder() + .provider(ModelProvider.DeepInfra) + .modelName(googleGemma9b) + .displayName("Gemma 2 9B it") + .inputCost(0.06) + .outputCost(0.06) + .contextWindow(4_000) + .apiKeyUsed(true) + .build()); } /** @@ -352,57 +352,57 @@ private void addDeepInfraModels() { private void addGeminiModels() { String gemini15Flash = "gemini-1.5-flash"; - models.put(ModelProvider.GOOGLE.getName() + ":" + gemini15Flash, - LanguageModel.builder() - .provider(ModelProvider.GOOGLE) - .modelName(gemini15Flash) - .displayName("Gemini 1.5 Flash") - .inputCost(0.0375) - .outputCost(0.6) - .contextWindow(1_000_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Google.getName() + ":" + gemini15Flash, + LanguageModel.builder() + .provider(ModelProvider.Google) + .modelName(gemini15Flash) + .displayName("Gemini 1.5 Flash") + .inputCost(0.0375) + .outputCost(0.6) + .contextWindow(1_000_000) + .apiKeyUsed(true) + .build()); String gemini15Pro = "gemini-1.5-pro"; - models.put(ModelProvider.GOOGLE.getName() + ":" + gemini15Pro, - LanguageModel.builder() - .provider(ModelProvider.GOOGLE) - .modelName(gemini15Pro) - .displayName("Gemini 1.5 Pro") - .inputCost(7) - .outputCost(21) - .contextWindow(2_000_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Google.getName() + ":" + gemini15Pro, + LanguageModel.builder() + .provider(ModelProvider.Google) + 
.modelName(gemini15Pro) + .displayName("Gemini 1.5 Pro") + .inputCost(7) + .outputCost(21) + .contextWindow(2_000_000) + .apiKeyUsed(true) + .build()); String gemini15ProExp0801 = "gemini-1.5-pro-exp-0801"; - models.put(ModelProvider.GOOGLE.getName() + ":" + gemini15ProExp0801, - LanguageModel.builder() - .provider(ModelProvider.GOOGLE) - .modelName(gemini15ProExp0801) - .displayName("Gemini 1.5 Pro 0801") - .inputCost(7) - .outputCost(21) - .contextWindow(2_000_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Google.getName() + ":" + gemini15ProExp0801, + LanguageModel.builder() + .provider(ModelProvider.Google) + .modelName(gemini15ProExp0801) + .displayName("Gemini 1.5 Pro 0801") + .inputCost(7) + .outputCost(21) + .contextWindow(2_000_000) + .apiKeyUsed(true) + .build()); String gemini10Pro = "gemini-1.0-pro"; - models.put(ModelProvider.GOOGLE.getName() + ":" + gemini10Pro, - LanguageModel.builder() - .provider(ModelProvider.GOOGLE) - .modelName(gemini10Pro) - .displayName("Gemini 1.0 Pro") - .inputCost(0.5) - .outputCost(1.5) - .contextWindow(1_000_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Google.getName() + ":" + gemini10Pro, + LanguageModel.builder() + .provider(ModelProvider.Google) + .modelName(gemini10Pro) + .displayName("Gemini 1.0 Pro") + .inputCost(0.5) + .outputCost(1.5) + .contextWindow(1_000_000) + .apiKeyUsed(true) + .build()); String geminiExp1206 = "gemini-exp-1206"; - models.put(ModelProvider.GOOGLE.getName() + ":" + gemini10Pro, + models.put(ModelProvider.Google.getName() + ":" + gemini10Pro, LanguageModel.builder() - .provider(ModelProvider.GOOGLE) + .provider(ModelProvider.Google) .modelName(geminiExp1206) .displayName("Gemini Exp 1206") .inputCost(0) @@ -415,200 +415,200 @@ private void addGeminiModels() { private void addGroqModels() { String gemma7b = "gemma-7b-it"; - models.put(ModelProvider.GROQ.getName() + ":" + gemma7b, - LanguageModel.builder() - .provider(ModelProvider.GROQ) - .modelName(gemma7b) - .displayName("Gemma 7B it") - .inputCost(0.07) - .outputCost(0.07) - .contextWindow(8_192) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Groq.getName() + ":" + gemma7b, + LanguageModel.builder() + .provider(ModelProvider.Groq) + .modelName(gemma7b) + .displayName("Gemma 7B it") + .inputCost(0.07) + .outputCost(0.07) + .contextWindow(8_192) + .apiKeyUsed(true) + .build()); String gemma2 = "gemma2-9b-it"; - models.put(ModelProvider.GROQ.getName() + ":" + gemma2, - LanguageModel.builder() - .provider(ModelProvider.GROQ) - .modelName(gemma2) - .displayName("Gemma 2 9B it") - .inputCost(0.2) - .outputCost(0.2) - .contextWindow(8_192) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Groq.getName() + ":" + gemma2, + LanguageModel.builder() + .provider(ModelProvider.Groq) + .modelName(gemma2) + .displayName("Gemma 2 9B it") + .inputCost(0.2) + .outputCost(0.2) + .contextWindow(8_192) + .apiKeyUsed(true) + .build()); String llama3 = "llama3-8b-8192"; - models.put(ModelProvider.GROQ.getName() + ":" + llama3, - LanguageModel.builder() - .provider(ModelProvider.GROQ) - .modelName(llama3) - .displayName("Llama 3 8B") - .inputCost(0.05) - .outputCost(0.05) - .contextWindow(8_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Groq.getName() + ":" + llama3, + LanguageModel.builder() + .provider(ModelProvider.Groq) + .modelName(llama3) + .displayName("Llama 3 8B") + .inputCost(0.05) + .outputCost(0.05) + .contextWindow(8_000) + .apiKeyUsed(true) + .build()); String llama31Versatile = 
"llama-3.1-70b-versatile"; - models.put(ModelProvider.GROQ.getName() + ":" + llama31Versatile, - LanguageModel.builder() - .provider(ModelProvider.GROQ) - .modelName(llama31Versatile) - .displayName("Llama 3.1 70B") - .inputCost(0.59) - .outputCost(0.79) - .contextWindow(131_072) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Groq.getName() + ":" + llama31Versatile, + LanguageModel.builder() + .provider(ModelProvider.Groq) + .modelName(llama31Versatile) + .displayName("Llama 3.1 70B") + .inputCost(0.59) + .outputCost(0.79) + .contextWindow(131_072) + .apiKeyUsed(true) + .build()); String llama31Instant = "llama-3.1-8b-instant"; - models.put(ModelProvider.GROQ.getName() + ":" + llama31Instant, - LanguageModel.builder() - .provider(ModelProvider.GROQ) - .modelName(llama31Instant) - .displayName("Llama 3.1 8B") - .inputCost(0.05) - .outputCost(0.08) - .contextWindow(131_072) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Groq.getName() + ":" + llama31Instant, + LanguageModel.builder() + .provider(ModelProvider.Groq) + .modelName(llama31Instant) + .displayName("Llama 3.1 8B") + .inputCost(0.05) + .outputCost(0.08) + .contextWindow(131_072) + .apiKeyUsed(true) + .build()); String mixtral8x7b = "mixtral-8x7b-32768"; - models.put(ModelProvider.GROQ.getName() + ":" + mixtral8x7b, - LanguageModel.builder() - .provider(ModelProvider.GROQ) - .modelName(mixtral8x7b) - .displayName("Mixtral 8x7B") - .inputCost(0.24) - .outputCost(0.24) - .contextWindow(32_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Groq.getName() + ":" + mixtral8x7b, + LanguageModel.builder() + .provider(ModelProvider.Groq) + .modelName(mixtral8x7b) + .displayName("Mixtral 8x7B") + .inputCost(0.24) + .outputCost(0.24) + .contextWindow(32_000) + .apiKeyUsed(true) + .build()); String llama370b = "llama3-70b-8192"; - models.put(ModelProvider.GROQ.getName() + ":" + llama370b, - LanguageModel.builder() - .provider(ModelProvider.GROQ) - .modelName(llama370b) - .displayName("Llama 3 70B") - .inputCost(0.59) - .outputCost(0.79) - .contextWindow(8192) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Groq.getName() + ":" + llama370b, + LanguageModel.builder() + .provider(ModelProvider.Groq) + .modelName(llama370b) + .displayName("Llama 3 70B") + .inputCost(0.59) + .outputCost(0.79) + .contextWindow(8192) + .apiKeyUsed(true) + .build()); } private void addMistralModels() { String openMistral7B = OPEN_MISTRAL_7B.toString(); - models.put(ModelProvider.MISTRAL.getName() + ":" + openMistral7B, - LanguageModel.builder() - .provider(ModelProvider.MISTRAL) - .modelName(openMistral7B) - .displayName("Mistral 7B") - .inputCost(0.25) - .outputCost(0.25) - .contextWindow(32_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Mistral.getName() + ":" + openMistral7B, + LanguageModel.builder() + .provider(ModelProvider.Mistral) + .modelName(openMistral7B) + .displayName("Mistral 7B") + .inputCost(0.25) + .outputCost(0.25) + .contextWindow(32_000) + .apiKeyUsed(true) + .build()); String openMixtral8x7B = OPEN_MIXTRAL_8x7B.toString(); - models.put(ModelProvider.MISTRAL.getName() + ":" + openMixtral8x7B, - LanguageModel.builder() - .provider(ModelProvider.MISTRAL) - .modelName(openMixtral8x7B) - .displayName("Mistral 8x7B") - .inputCost(0.7) - .outputCost(0.7) - .contextWindow(32_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Mistral.getName() + ":" + openMixtral8x7B, + LanguageModel.builder() + .provider(ModelProvider.Mistral) + .modelName(openMixtral8x7B) + 
.displayName("Mistral 8x7B") + .inputCost(0.7) + .outputCost(0.7) + .contextWindow(32_000) + .apiKeyUsed(true) + .build()); String openMixtral8x22B = OPEN_MIXTRAL_8X22B.toString(); - models.put(ModelProvider.MISTRAL.getName() + ":" + openMixtral8x22B, - LanguageModel.builder() - .provider(ModelProvider.MISTRAL) - .modelName(openMixtral8x22B) - .displayName("Mistral 8x22b") - .inputCost(2) - .outputCost(6) - .contextWindow(64_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Mistral.getName() + ":" + openMixtral8x22B, + LanguageModel.builder() + .provider(ModelProvider.Mistral) + .modelName(openMixtral8x22B) + .displayName("Mistral 8x22b") + .inputCost(2) + .outputCost(6) + .contextWindow(64_000) + .apiKeyUsed(true) + .build()); String mistralSmallLatest = MISTRAL_SMALL_LATEST.toString(); - models.put(ModelProvider.MISTRAL.getName() + ":" + mistralSmallLatest, - LanguageModel.builder() - .provider(ModelProvider.MISTRAL) - .modelName(mistralSmallLatest) - .displayName("Mistral Small") - .inputCost(1) - .outputCost(3) - .contextWindow(32_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Mistral.getName() + ":" + mistralSmallLatest, + LanguageModel.builder() + .provider(ModelProvider.Mistral) + .modelName(mistralSmallLatest) + .displayName("Mistral Small") + .inputCost(1) + .outputCost(3) + .contextWindow(32_000) + .apiKeyUsed(true) + .build()); String mistralMediumLatest = MISTRAL_MEDIUM_LATEST.toString(); - models.put(ModelProvider.MISTRAL.getName() + ":" + mistralMediumLatest, - LanguageModel.builder() - .provider(ModelProvider.MISTRAL) - .modelName(mistralMediumLatest) - .displayName("Mistral Medium") - .inputCost(2.7) - .outputCost(0.1) - .contextWindow(32_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Mistral.getName() + ":" + mistralMediumLatest, + LanguageModel.builder() + .provider(ModelProvider.Mistral) + .modelName(mistralMediumLatest) + .displayName("Mistral Medium") + .inputCost(2.7) + .outputCost(0.1) + .contextWindow(32_000) + .apiKeyUsed(true) + .build()); String mistralLargeLatest = MISTRAL_LARGE_LATEST.toString(); - models.put(ModelProvider.MISTRAL.getName() + ":" + mistralLargeLatest, - LanguageModel.builder() - .provider(ModelProvider.MISTRAL) - .modelName(mistralLargeLatest) - .displayName("Mistral Large") - .inputCost(4) - .outputCost(12) - .contextWindow(32_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Mistral.getName() + ":" + mistralLargeLatest, + LanguageModel.builder() + .provider(ModelProvider.Mistral) + .modelName(mistralLargeLatest) + .displayName("Mistral Large") + .inputCost(4) + .outputCost(12) + .contextWindow(32_000) + .apiKeyUsed(true) + .build()); String codestral = "codestral-2405"; - models.put(ModelProvider.MISTRAL.getName() + ":" + codestral, - LanguageModel.builder() - .provider(ModelProvider.MISTRAL) - .modelName(codestral) - .displayName("Codestral") - .inputCost(1) - .outputCost(3) - .contextWindow(32_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.Mistral.getName() + ":" + codestral, + LanguageModel.builder() + .provider(ModelProvider.Mistral) + .modelName(codestral) + .displayName("Codestral") + .inputCost(1) + .outputCost(3) + .contextWindow(32_000) + .apiKeyUsed(true) + .build()); } private void addDeepSeekModels() { String coder = "deepseek-coder"; - models.put(ModelProvider.DEEP_SEEK.getName() + ":" + coder, - LanguageModel.builder() - .provider(ModelProvider.DEEP_SEEK) - .modelName(coder) - .displayName("DeepSeek Coder") - .inputCost(0.14) - .outputCost(0.28) 
- .contextWindow(128_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.DeepSeek.getName() + ":" + coder, + LanguageModel.builder() + .provider(ModelProvider.DeepSeek) + .modelName(coder) + .displayName("DeepSeek Coder") + .inputCost(0.14) + .outputCost(0.28) + .contextWindow(128_000) + .apiKeyUsed(true) + .build()); String chat = "deepseek-chat"; - models.put(ModelProvider.DEEP_SEEK.getName() + ":" + chat, - LanguageModel.builder() - .provider(ModelProvider.DEEP_SEEK) - .modelName(chat) - .displayName("DeepSeek Chat") - .inputCost(0.14) - .outputCost(0.28) - .contextWindow(128_000) - .apiKeyUsed(true) - .build()); + models.put(ModelProvider.DeepSeek.getName() + ":" + chat, + LanguageModel.builder() + .provider(ModelProvider.DeepSeek) + .modelName(chat) + .displayName("DeepSeek Chat") + .inputCost(0.14) + .outputCost(0.28) + .contextWindow(128_000) + .apiKeyUsed(true) + .build()); } @NotNull @@ -621,7 +621,7 @@ public List getModels() { String apiKey = openRouterChatModelFactory.getApiKey(); if (apiKey != null && !apiKey.isEmpty()) { openRouterChatModelFactory.getModels().forEach(model -> - modelsCopy.put(ModelProvider.OPEN_ROUTER.getName() + ":" + model.getModelName(), model)); + modelsCopy.put(ModelProvider.OpenRouter.getName() + ":" + model.getModelName(), model)); } return new ArrayList<>(modelsCopy.values()); diff --git a/src/main/java/com/devoxx/genie/service/LLMProviderService.java b/src/main/java/com/devoxx/genie/service/LLMProviderService.java index 52f656aa..005a04d3 100644 --- a/src/main/java/com/devoxx/genie/service/LLMProviderService.java +++ b/src/main/java/com/devoxx/genie/service/LLMProviderService.java @@ -11,7 +11,6 @@ import java.util.List; import java.util.Optional; import java.util.function.Supplier; -import java.util.stream.Collectors; import static com.devoxx.genie.model.enumarations.ModelProvider.*; @@ -21,15 +20,15 @@ public class LLMProviderService { static { DevoxxGenieStateService stateService = DevoxxGenieStateService.getInstance(); - providerKeyMap.put(OPENAI, stateService::getOpenAIKey); - providerKeyMap.put(ANTHROPIC, stateService::getAnthropicKey); - providerKeyMap.put(MISTRAL, stateService::getMistralKey); - providerKeyMap.put(GROQ, stateService::getGroqKey); - providerKeyMap.put(DEEP_INFRA, stateService::getDeepInfraKey); - providerKeyMap.put(GOOGLE, stateService::getGeminiKey); - providerKeyMap.put(DEEP_SEEK, stateService::getDeepSeekKey); - providerKeyMap.put(OPEN_ROUTER, stateService::getOpenRouterKey); - providerKeyMap.put(AZURE_OPEN_AI, stateService::getAzureOpenAIKey); + providerKeyMap.put(OpenAI, stateService::getOpenAIKey); + providerKeyMap.put(Anthropic, stateService::getAnthropicKey); + providerKeyMap.put(Mistral, stateService::getMistralKey); + providerKeyMap.put(Groq, stateService::getGroqKey); + providerKeyMap.put(DeepInfra, stateService::getDeepInfraKey); + providerKeyMap.put(Google, stateService::getGeminiKey); + providerKeyMap.put(DeepSeek, stateService::getDeepSeekKey); + providerKeyMap.put(OpenRouter, stateService::getOpenRouterKey); + providerKeyMap.put(AzureOpenAI, stateService::getAzureOpenAIKey); } @NotNull @@ -65,14 +64,14 @@ private List getModelProvidersWithApiKeyConfigured() { .map(Supplier::get) .filter(key -> !key.isBlank()) .isPresent()) - .collect(Collectors.toList()); + .toList(); } - private List getOptionalProviders() { + private @NotNull List getOptionalProviders() { List optionalModelProviders = new ArrayList<>(); - if (DevoxxGenieStateService.getInstance().getShowAzureOpenAIFields()) { - 
optionalModelProviders.add(AZURE_OPEN_AI); + if (Boolean.TRUE.equals(DevoxxGenieStateService.getInstance().getShowAzureOpenAIFields())) { + optionalModelProviders.add(AzureOpenAI); } return optionalModelProviders; diff --git a/src/main/java/com/devoxx/genie/service/ProjectContentService.java b/src/main/java/com/devoxx/genie/service/ProjectContentService.java index 8da83c2b..0b684c33 100644 --- a/src/main/java/com/devoxx/genie/service/ProjectContentService.java +++ b/src/main/java/com/devoxx/genie/service/ProjectContentService.java @@ -77,9 +77,9 @@ public CompletableFuture getDirectoryContent(Project project, public static Encoding getEncodingForProvider(@NotNull ModelProvider provider) { return switch (provider) { - case OPENAI, ANTHROPIC, GOOGLE, AZURE_OPEN_AI -> + case OpenAI, Anthropic, Google, AzureOpenAI -> Encodings.newDefaultEncodingRegistry().getEncoding(EncodingType.CL100K_BASE); - case MISTRAL, DEEP_INFRA, GROQ, DEEP_SEEK, OPEN_ROUTER -> + case Mistral, DeepInfra, Groq, DeepSeek, OpenRouter -> // These often use the Llama tokenizer or similar Encodings.newDefaultEncodingRegistry().getEncoding(EncodingType.R50K_BASE); default -> diff --git a/src/main/java/com/devoxx/genie/service/PromptExecutionService.java b/src/main/java/com/devoxx/genie/service/PromptExecutionService.java index 06cf75cf..82b93231 100644 --- a/src/main/java/com/devoxx/genie/service/PromptExecutionService.java +++ b/src/main/java/com/devoxx/genie/service/PromptExecutionService.java @@ -123,7 +123,7 @@ private boolean isCanceled() { ChatMemoryService.getInstance().add(chatMessageContext.getProject(), response.content()); return response; } catch (Exception e) { - if (chatMessageContext.getLanguageModel().getProvider().equals(ModelProvider.JAN)) { + if (chatMessageContext.getLanguageModel().getProvider().equals(ModelProvider.Jan)) { throw new ModelNotActiveException("Selected Jan model is not active. 
Download and make it active or add API Key in Jan settings."); } ChatMemoryService.getInstance().removeLast(chatMessageContext.getProject()); diff --git a/src/main/java/com/devoxx/genie/ui/panel/ActionButtonsPanel.java b/src/main/java/com/devoxx/genie/ui/panel/ActionButtonsPanel.java index 839703fe..39153559 100644 --- a/src/main/java/com/devoxx/genie/ui/panel/ActionButtonsPanel.java +++ b/src/main/java/com/devoxx/genie/ui/panel/ActionButtonsPanel.java @@ -293,14 +293,14 @@ private void removeProjectContext() { } private boolean isSupportedProvider(@NotNull ModelProvider modelProvider) { - return modelProvider.equals(ModelProvider.GOOGLE) || - modelProvider.equals(ModelProvider.ANTHROPIC) || - modelProvider.equals(ModelProvider.OPENAI) || - modelProvider.equals(ModelProvider.MISTRAL) || - modelProvider.equals(ModelProvider.DEEP_SEEK) || - modelProvider.equals(ModelProvider.OPEN_ROUTER) || - modelProvider.equals(ModelProvider.DEEP_INFRA) || - modelProvider.equals(ModelProvider.OLLAMA); + return modelProvider.equals(ModelProvider.Google) || + modelProvider.equals(ModelProvider.Anthropic) || + modelProvider.equals(ModelProvider.OpenAI) || + modelProvider.equals(ModelProvider.Mistral) || + modelProvider.equals(ModelProvider.DeepSeek) || + modelProvider.equals(ModelProvider.OpenRouter) || + modelProvider.equals(ModelProvider.DeepInfra) || + modelProvider.equals(ModelProvider.Ollama); } private void addProjectToContext() { diff --git a/src/main/java/com/devoxx/genie/ui/panel/ChatResponsePanel.java b/src/main/java/com/devoxx/genie/ui/panel/ChatResponsePanel.java index 4180bf2f..b6b7eea1 100644 --- a/src/main/java/com/devoxx/genie/ui/panel/ChatResponsePanel.java +++ b/src/main/java/com/devoxx/genie/ui/panel/ChatResponsePanel.java @@ -161,7 +161,7 @@ private void processGitDiff(@NotNull ChatMessageContext chatMessageContext, @Not * @return the updated token usage */ private static TokenUsage calcOllamaInputTokenCount(@NotNull ChatMessageContext chatMessageContext, TokenUsage tokenUsage) { - if (chatMessageContext.getLanguageModel().getProvider().equals(ModelProvider.OLLAMA)) { + if (chatMessageContext.getLanguageModel().getProvider().equals(ModelProvider.Ollama)) { int inputContextTokens = 0; if (chatMessageContext.getContext() != null) { Encoding encodingForProvider = ProjectContentService.getEncodingForProvider(chatMessageContext.getLanguageModel().getProvider()); diff --git a/src/main/java/com/devoxx/genie/ui/panel/LlmProviderPanel.java b/src/main/java/com/devoxx/genie/ui/panel/LlmProviderPanel.java index 312b121a..a2089d9b 100644 --- a/src/main/java/com/devoxx/genie/ui/panel/LlmProviderPanel.java +++ b/src/main/java/com/devoxx/genie/ui/panel/LlmProviderPanel.java @@ -117,23 +117,23 @@ public void addModelProvidersToComboBox() { providerService.getAvailableModelProviders().stream() .filter(provider -> switch (provider) { - case OLLAMA -> stateService.isOllamaEnabled(); - case LMSTUDIO -> stateService.isLmStudioEnabled(); - case GPT_4_ALL -> stateService.isGpt4AllEnabled(); - case JAN -> stateService.isJanEnabled(); - case EXO -> stateService.isExoEnabled(); - case LLAMA -> stateService.isLlamaCPPEnabled(); - case JLAMA -> stateService.isJlamaEnabled(); - case CUSTOM_OPEN_AI -> stateService.isCustomOpenAIEnabled(); - case OPENAI -> stateService.isOpenAIEnabled(); - case MISTRAL -> stateService.isMistralEnabled(); - case ANTHROPIC -> stateService.isAnthropicEnabled(); - case GROQ -> stateService.isGroqEnabled(); - case DEEP_INFRA -> stateService.isDeepInfraEnabled(); - case GOOGLE -> 
stateService.isGoogleEnabled(); - case DEEP_SEEK -> stateService.isDeepSeekEnabled(); - case OPEN_ROUTER -> stateService.isOpenRouterEnabled(); - case AZURE_OPEN_AI -> stateService.isAzureOpenAIEnabled(); + case Ollama -> stateService.isOllamaEnabled(); + case LMStudio -> stateService.isLmStudioEnabled(); + case GPT4All -> stateService.isGpt4AllEnabled(); + case Jan -> stateService.isJanEnabled(); + case Exo -> stateService.isExoEnabled(); + case LLaMA -> stateService.isLlamaCPPEnabled(); + case Jlama -> stateService.isJlamaEnabled(); + case CustomOpenAI -> stateService.isCustomOpenAIEnabled(); + case OpenAI -> stateService.isOpenAIEnabled(); + case Mistral -> stateService.isMistralEnabled(); + case Anthropic -> stateService.isAnthropicEnabled(); + case Groq -> stateService.isGroqEnabled(); + case DeepInfra -> stateService.isDeepInfraEnabled(); + case Google -> stateService.isGoogleEnabled(); + case DeepSeek -> stateService.isDeepSeekEnabled(); + case OpenRouter -> stateService.isOpenRouterEnabled(); + case AzureOpenAI -> stateService.isAzureOpenAIEnabled(); }) .distinct() .sorted(Comparator.comparing(ModelProvider::getName)) @@ -149,7 +149,7 @@ private void refreshModels() { return; } - if (selectedProvider == ModelProvider.LMSTUDIO || selectedProvider == ModelProvider.OLLAMA || selectedProvider == ModelProvider.JAN) { + if (selectedProvider == ModelProvider.LMStudio || selectedProvider == ModelProvider.Ollama || selectedProvider == ModelProvider.Jan) { ApplicationManager.getApplication().invokeLater(() -> { refreshButton.setEnabled(false); diff --git a/src/main/java/com/devoxx/genie/ui/settings/DevoxxGenieStateService.java b/src/main/java/com/devoxx/genie/ui/settings/DevoxxGenieStateService.java index 370c9f04..6394376f 100644 --- a/src/main/java/com/devoxx/genie/ui/settings/DevoxxGenieStateService.java +++ b/src/main/java/com/devoxx/genie/ui/settings/DevoxxGenieStateService.java @@ -241,9 +241,9 @@ public void setSelectedProvider(@NotNull String projectLocation, String selected public String getSelectedProvider(@NotNull String projectLocation) { if (lastSelectedProvider != null) { - return lastSelectedProvider.getOrDefault(projectLocation, ModelProvider.OLLAMA.getName()); + return lastSelectedProvider.getOrDefault(projectLocation, ModelProvider.Ollama.getName()); } else { - return ModelProvider.OLLAMA.getName(); + return ModelProvider.Ollama.getName(); } } diff --git a/src/main/java/com/devoxx/genie/util/ChatMessageContextUtil.java b/src/main/java/com/devoxx/genie/util/ChatMessageContextUtil.java index 22ba68ae..10b6c228 100644 --- a/src/main/java/com/devoxx/genie/util/ChatMessageContextUtil.java +++ b/src/main/java/com/devoxx/genie/util/ChatMessageContextUtil.java @@ -28,6 +28,9 @@ public class ChatMessageContextUtil { public static final int ZERO_SECONDS = 0; public static final int SIXTY_SECONDS = 60; + private ChatMessageContextUtil() { + } + public static @NotNull ChatMessageContext createContext(Project project, String userPromptText, LanguageModel languageModel, @@ -125,7 +128,7 @@ private static void addEditorInfoToMessageContext(Editor editor, public static boolean isOpenAIo1Model(LanguageModel languageModel) { return languageModel != null && - languageModel.getProvider() == ModelProvider.OPENAI && + languageModel.getProvider() == ModelProvider.OpenAI && languageModel.getModelName() != null && languageModel.getModelName().toLowerCase().startsWith("o1-"); } diff --git a/src/main/java/com/devoxx/genie/util/DefaultLLMSettingsUtil.java 
b/src/main/java/com/devoxx/genie/util/DefaultLLMSettingsUtil.java index 133e29c0..d891575e 100644 --- a/src/main/java/com/devoxx/genie/util/DefaultLLMSettingsUtil.java +++ b/src/main/java/com/devoxx/genie/util/DefaultLLMSettingsUtil.java @@ -11,16 +11,6 @@ public class DefaultLLMSettingsUtil { public static final Map DEFAULT_INPUT_COSTS = new HashMap<>(); public static final Map DEFAULT_OUTPUT_COSTS = new HashMap<>(); -// public static void initializeDefaultCosts() { -// LLMModelRegistryService modelRegistry = LLMModelRegistryService.getInstance(); -// for (LanguageModel model : modelRegistry.getModels()) { -// if (isApiKeyBasedProvider(model.getProvider())) { -// DEFAULT_INPUT_COSTS.put(new CostKey(model.getProvider(), model.getModelName()), model.getInputCost()); -// DEFAULT_OUTPUT_COSTS.put(new CostKey(model.getProvider(), model.getModelName()), model.getOutputCost()); -// } -// } -// } - /** * Does the ModelProvider use an API KEY? * @@ -28,14 +18,14 @@ public class DefaultLLMSettingsUtil { * @return true when API Key is required, meaning a cost is involved */ public static boolean isApiKeyBasedProvider(ModelProvider provider) { - return provider == ModelProvider.OPENAI || - provider == ModelProvider.ANTHROPIC || - provider == ModelProvider.MISTRAL || - provider == ModelProvider.GROQ || - provider == ModelProvider.DEEP_INFRA || - provider == ModelProvider.GOOGLE || - provider == ModelProvider.OPEN_ROUTER || - provider == ModelProvider.AZURE_OPEN_AI; + return provider == ModelProvider.OpenAI || + provider == ModelProvider.Anthropic || + provider == ModelProvider.Mistral || + provider == ModelProvider.Groq || + provider == ModelProvider.DeepInfra || + provider == ModelProvider.Google || + provider == ModelProvider.OpenRouter || + provider == ModelProvider.AzureOpenAI; } public record CostKey(ModelProvider provider, String modelName) { diff --git a/src/main/java/com/devoxx/genie/util/LLMProviderUtil.java b/src/main/java/com/devoxx/genie/util/LLMProviderUtil.java deleted file mode 100644 index 268626ef..00000000 --- a/src/main/java/com/devoxx/genie/util/LLMProviderUtil.java +++ /dev/null @@ -1,36 +0,0 @@ -package com.devoxx.genie.util; - -import com.devoxx.genie.model.enumarations.ModelProvider; -import com.devoxx.genie.service.DevoxxGenieSettingsService; -import com.devoxx.genie.ui.settings.DevoxxGenieStateService; - -import java.util.Arrays; -import java.util.List; -import java.util.stream.Collectors; - -public class LLMProviderUtil { - - /** - * Get LLM providers for which an API Key is defined in the settings - * - * @return List of LLM providers - */ - public static List getApiKeyEnabledProviders() { - DevoxxGenieSettingsService settings = DevoxxGenieStateService.getInstance(); - return Arrays.stream(ModelProvider.values()) - .filter(provider -> switch (provider) { - case OPENAI -> !settings.getOpenAIKey().isEmpty(); - case AZURE_OPEN_AI -> !settings.getAzureOpenAIKey().isEmpty() && - !settings.getAzureOpenAIEndpoint().isEmpty() && !settings.getAzureOpenAIDeployment().isEmpty(); - case ANTHROPIC -> !settings.getAnthropicKey().isEmpty(); - case MISTRAL -> !settings.getMistralKey().isEmpty(); - case GROQ -> !settings.getGroqKey().isEmpty(); - case DEEP_INFRA -> !settings.getDeepInfraKey().isEmpty(); - case DEEP_SEEK -> !settings.getDeepSeekKey().isEmpty(); - case OPEN_ROUTER -> !settings.getOpenRouterKey().isEmpty(); - case GOOGLE -> !settings.getGeminiKey().isEmpty(); - default -> false; - }) - .collect(Collectors.toList()); - } -} diff --git a/src/main/resources/application.properties 
b/src/main/resources/application.properties
index 46055b1b..bca927cc 100644
--- a/src/main/resources/application.properties
+++ b/src/main/resources/application.properties
@@ -1,2 +1,2 @@
-#Tue Dec 10 09:10:31 CET 2024
+#Tue Dec 10 11:05:00 CET 2024
 version=0.4.1
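One compatibility note on the rename itself: the display names returned by getName() are untouched, but the enum constant names are not, so anything that resolves a provider via ModelProvider.valueOf(...) or compares against Enum.name() using the old spellings ("OPENAI", "GPT_4_ALL", "AZURE_OPEN_AI", ...) will now throw IllegalArgumentException or fail to match. If any persisted setting or call site still carries the old spellings, a tolerant lookup could bridge both schemes. The sketch below is illustrative only: ModelProviderResolver and fromStoredName are hypothetical names, and it relies solely on enum members visible in this diff (values(), name(), and the Lombok-generated getName()).

```java
import com.devoxx.genie.model.enumarations.ModelProvider;

import java.util.Arrays;
import java.util.Optional;

// Hypothetical helper (not part of this PR): resolves a provider from a stored string,
// accepting old constant names ("OPENAI", "GPT_4_ALL"), new constant names ("OpenAI"),
// and display names ("GPT4All") alike.
public final class ModelProviderResolver {

    private ModelProviderResolver() {
        // utility class, mirroring the private constructor added to ChatModelFactoryProvider
    }

    public static Optional<ModelProvider> fromStoredName(String stored) {
        if (stored == null || stored.isBlank()) {
            return Optional.empty();
        }
        String trimmed = stored.trim();
        String withoutUnderscores = trimmed.replace("_", "");
        return Arrays.stream(ModelProvider.values())
                .filter(p -> p.getName().equalsIgnoreCase(trimmed)        // display name, e.g. "GPT4All"
                        || p.name().equalsIgnoreCase(withoutUnderscores)) // "OPENAI"/"OpenAI" -> OpenAI, "GPT_4_ALL" -> GPT4All
                .findFirst();
    }
}
```

If providers are only ever persisted through getName() (as DevoxxGenieStateService.getSelectedProvider suggests), the rename is benign and no such helper is needed; otherwise a one-off migration of the stored values achieves the same result.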