From fbc60a26d2a2e4614b03820965228d36637e692e Mon Sep 17 00:00:00 2001 From: Stephan Janssen Date: Tue, 17 Dec 2024 11:31:19 +0100 Subject: [PATCH] List 'custom local model' when enabled in dropdown + Removed Exo & JLama because they can use the Custom OpenAI-compliant local provider --- .../model/enumarations/ModelProvider.java | 14 +- .../service/DevoxxGenieSettingsService.java | 8 +- .../chatmodel/ChatModelFactoryProvider.java | 24 ++- .../genie/chatmodel/ChatModelProvider.java | 7 +- .../CustomOpenAIChatModelFactory.java} | 16 +- .../chatmodel/exo/ExoChatModelFactory.java | 153 ------------------ .../openai/OpenAIChatModelFactory.java | 8 +- .../ActionButtonsPanelController.java | 5 +- .../java/com/devoxx/genie/model/Constant.java | 2 - .../genie/ui/panel/LlmProviderPanel.java | 21 ++- .../ui/settings/DevoxxGenieStateService.java | 16 +- .../settings/llm/LLMProvidersComponent.java | 41 ++--- .../llm/LLMProvidersConfigurable.java | 33 ++-- src/main/resources/META-INF/plugin.xml | 2 + 14 files changed, 86 insertions(+), 264 deletions(-) rename src/main/java/com/devoxx/genie/chatmodel/{jlama/JLamaChatModelFactory.java => customopenai/CustomOpenAIChatModelFactory.java} (66%) delete mode 100644 src/main/java/com/devoxx/genie/chatmodel/exo/ExoChatModelFactory.java diff --git a/core/src/main/java/com/devoxx/genie/model/enumarations/ModelProvider.java b/core/src/main/java/com/devoxx/genie/model/enumarations/ModelProvider.java index 2732df44..22516cc9 100644 --- a/core/src/main/java/com/devoxx/genie/model/enumarations/ModelProvider.java +++ b/core/src/main/java/com/devoxx/genie/model/enumarations/ModelProvider.java @@ -8,23 +8,23 @@ @Getter public enum ModelProvider { - Ollama("Ollama", Type.LOCAL), - LMStudio("LMStudio", Type.LOCAL), + CustomOpenAI("CustomOpenAI", Type.LOCAL), GPT4All("GPT4All", Type.LOCAL), Jan("Jan", Type.LOCAL), + LLaMA("LLaMA.c++", Type.LOCAL), + LMStudio("LMStudio", Type.LOCAL), + Ollama("Ollama", Type.LOCAL), + OpenAI("OpenAI", Type.CLOUD), 
Anthropic("Anthropic", Type.CLOUD), Mistral("Mistral", Type.CLOUD), Groq("Groq", Type.CLOUD), DeepInfra("DeepInfra", Type.CLOUD), Google("Google", Type.CLOUD), - Exo("Exo (Experimental)", Type.LOCAL), - LLaMA("LLaMA.c++", Type.LOCAL), OpenRouter("OpenRouter", Type.CLOUD), DeepSeek("DeepSeek", Type.CLOUD), - Jlama("Jlama (Experimental /w REST API)", Type.LOCAL), - AzureOpenAI("AzureOpenAI", Type.OPTIONAL), - CustomOpenAI("CustomOpenAI", Type.OPTIONAL); + + AzureOpenAI("AzureOpenAI", Type.OPTIONAL); public enum Type { LOCAL, // Local Providers diff --git a/core/src/main/java/com/devoxx/genie/service/DevoxxGenieSettingsService.java b/core/src/main/java/com/devoxx/genie/service/DevoxxGenieSettingsService.java index cf0b57f5..9a6f5c57 100644 --- a/core/src/main/java/com/devoxx/genie/service/DevoxxGenieSettingsService.java +++ b/core/src/main/java/com/devoxx/genie/service/DevoxxGenieSettingsService.java @@ -20,8 +20,6 @@ public interface DevoxxGenieSettingsService { String getJanModelUrl(); - String getExoModelUrl(); - String getOpenAIKey(); String getAzureOpenAIEndpoint(); @@ -104,8 +102,6 @@ public interface DevoxxGenieSettingsService { void setJanModelUrl(String url); - void setExoModelUrl(String url); - void setOpenAIKey(String key); void setAzureOpenAIEndpoint(String endpoint); @@ -188,7 +184,7 @@ public interface DevoxxGenieSettingsService { String getCustomOpenAIUrl(); - String getJlamaUrl(); + void setCustomOpenAIModelName(String text); - void setJlamaUrl(String text); + String getCustomOpenAIModelName(); } diff --git a/src/main/java/com/devoxx/genie/chatmodel/ChatModelFactoryProvider.java b/src/main/java/com/devoxx/genie/chatmodel/ChatModelFactoryProvider.java index 32adb3be..64dc5823 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/ChatModelFactoryProvider.java +++ b/src/main/java/com/devoxx/genie/chatmodel/ChatModelFactoryProvider.java @@ -2,14 +2,13 @@ import com.devoxx.genie.chatmodel.anthropic.AnthropicChatModelFactory; import 
com.devoxx.genie.chatmodel.azureopenai.AzureOpenAIChatModelFactory; +import com.devoxx.genie.chatmodel.customopenai.CustomOpenAIChatModelFactory; import com.devoxx.genie.chatmodel.deepinfra.DeepInfraChatModelFactory; import com.devoxx.genie.chatmodel.deepseek.DeepSeekChatModelFactory; -import com.devoxx.genie.chatmodel.exo.ExoChatModelFactory; import com.devoxx.genie.chatmodel.google.GoogleChatModelFactory; import com.devoxx.genie.chatmodel.gpt4all.GPT4AllChatModelFactory; import com.devoxx.genie.chatmodel.groq.GroqChatModelFactory; import com.devoxx.genie.chatmodel.jan.JanChatModelFactory; -import com.devoxx.genie.chatmodel.jlama.JLamaChatModelFactory; import com.devoxx.genie.chatmodel.lmstudio.LMStudioChatModelFactory; import com.devoxx.genie.chatmodel.mistral.MistralChatModelFactory; import com.devoxx.genie.chatmodel.ollama.OllamaChatModelFactory; @@ -42,21 +41,20 @@ private ChatModelFactoryProvider() { */ private static @Nullable ChatModelFactory createFactory(@NotNull String modelProvider) { return switch (modelProvider) { - case "Ollama" -> new OllamaChatModelFactory(); - case "Jan" -> new JanChatModelFactory(); - case "OpenRouter" -> new OpenRouterChatModelFactory(); - case "LMStudio" -> new LMStudioChatModelFactory(); - case "Exo" -> new ExoChatModelFactory(); - case "OpenAI" -> new OpenAIChatModelFactory(); case "Anthropic" -> new AnthropicChatModelFactory(); - case "Mistral" -> new MistralChatModelFactory(); - case "Groq" -> new GroqChatModelFactory(); + case "AzureOpenAI" -> new AzureOpenAIChatModelFactory(); + case "CustomOpenAI" -> new CustomOpenAIChatModelFactory(); case "DeepInfra" -> new DeepInfraChatModelFactory(); - case "Google" -> new GoogleChatModelFactory(); case "DeepSeek" -> new DeepSeekChatModelFactory(); - case "Jlama" -> new JLamaChatModelFactory(); - case "AzureOpenAI" -> new AzureOpenAIChatModelFactory(); + case "Google" -> new GoogleChatModelFactory(); + case "Groq" -> new GroqChatModelFactory(); case "GPT4All" -> new 
GPT4AllChatModelFactory(); + case "Jan" -> new JanChatModelFactory(); + case "LMStudio" -> new LMStudioChatModelFactory(); + case "Mistral" -> new MistralChatModelFactory(); + case "Ollama" -> new OllamaChatModelFactory(); + case "OpenAI" -> new OpenAIChatModelFactory(); + case "OpenRouter" -> new OpenRouterChatModelFactory(); default -> null; }; } diff --git a/src/main/java/com/devoxx/genie/chatmodel/ChatModelProvider.java b/src/main/java/com/devoxx/genie/chatmodel/ChatModelProvider.java index 4663d2e7..79ea4b22 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/ChatModelProvider.java +++ b/src/main/java/com/devoxx/genie/chatmodel/ChatModelProvider.java @@ -81,14 +81,11 @@ private void setLocalBaseUrl(@NotNull LanguageModel languageModel, case GPT4All: chatModel.setBaseUrl(stateService.getGpt4allModelUrl()); break; - case Exo: - chatModel.setBaseUrl(stateService.getExoModelUrl()); - break; case LLaMA: chatModel.setBaseUrl(stateService.getLlamaCPPUrl()); break; - case Jlama: - chatModel.setBaseUrl(stateService.getJlamaUrl()); + case CustomOpenAI: + chatModel.setBaseUrl(stateService.getCustomOpenAIUrl()); break; // Add other local providers as needed } diff --git a/src/main/java/com/devoxx/genie/chatmodel/jlama/JLamaChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/customopenai/CustomOpenAIChatModelFactory.java similarity index 66% rename from src/main/java/com/devoxx/genie/chatmodel/jlama/JLamaChatModelFactory.java rename to src/main/java/com/devoxx/genie/chatmodel/customopenai/CustomOpenAIChatModelFactory.java index 689bcdc0..984d37c2 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/jlama/JLamaChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/customopenai/CustomOpenAIChatModelFactory.java @@ -1,4 +1,4 @@ -package com.devoxx.genie.chatmodel.jlama; +package com.devoxx.genie.chatmodel.customopenai; import com.devoxx.genie.chatmodel.ChatModelFactory; import com.devoxx.genie.model.ChatModel; @@ -14,13 +14,14 @@ import 
java.util.Collections; import java.util.List; -public class JLamaChatModelFactory implements ChatModelFactory { +public class CustomOpenAIChatModelFactory implements ChatModelFactory { @Override public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) { + DevoxxGenieStateService stateInstance = DevoxxGenieStateService.getInstance(); return LocalAiChatModel.builder() - .baseUrl(DevoxxGenieStateService.getInstance().getJlamaUrl()) - .modelName(TEST_MODEL) + .baseUrl(stateInstance.getCustomOpenAIUrl()) + .modelName(stateInstance.getCustomOpenAIModelName().isBlank()?"default":stateInstance.getCustomOpenAIModelName()) .maxRetries(chatModel.getMaxRetries()) .temperature(chatModel.getTemperature()) .maxTokens(chatModel.getMaxTokens()) @@ -31,9 +32,10 @@ public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) { @Override public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel chatModel) { + DevoxxGenieStateService stateInstance = DevoxxGenieStateService.getInstance(); return LocalAiStreamingChatModel.builder() - .baseUrl(DevoxxGenieStateService.getInstance().getJlamaUrl()) - .modelName(TEST_MODEL) + .baseUrl(stateInstance.getCustomOpenAIUrl()) + .modelName(stateInstance.getCustomOpenAIModelName().isBlank()?"default":stateInstance.getCustomOpenAIModelName()) .temperature(chatModel.getTemperature()) .topP(chatModel.getTopP()) .timeout(Duration.ofSeconds(chatModel.getTimeout())) @@ -41,7 +43,7 @@ public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel ch } /** - * Get the model names from the Jlama service. + * Get the model names from the custom local OpenAI compliant service. 
* @return List of model names */ @Override diff --git a/src/main/java/com/devoxx/genie/chatmodel/exo/ExoChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/exo/ExoChatModelFactory.java deleted file mode 100644 index cd42eac9..00000000 --- a/src/main/java/com/devoxx/genie/chatmodel/exo/ExoChatModelFactory.java +++ /dev/null @@ -1,153 +0,0 @@ -package com.devoxx.genie.chatmodel.exo; - -import com.devoxx.genie.chatmodel.ChatModelFactory; -import com.devoxx.genie.model.ChatModel; -import com.devoxx.genie.model.LanguageModel; -import com.devoxx.genie.model.enumarations.ModelProvider; -import com.devoxx.genie.ui.settings.DevoxxGenieStateService; -import dev.langchain4j.model.chat.ChatLanguageModel; -import dev.langchain4j.model.chat.StreamingChatLanguageModel; -import dev.langchain4j.model.localai.LocalAiChatModel; -import dev.langchain4j.model.localai.LocalAiStreamingChatModel; -import org.jetbrains.annotations.NotNull; - -import java.time.Duration; -import java.util.ArrayList; -import java.util.List; - -public class ExoChatModelFactory implements ChatModelFactory { - - private final ModelProvider MODEL_PROVIDER = ModelProvider.Exo;; - - @Override - public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) { - return LocalAiChatModel.builder() - .baseUrl(DevoxxGenieStateService.getInstance().getExoModelUrl()) - .modelName(chatModel.getModelName()) - .temperature(chatModel.getTemperature()) - .topP(chatModel.getTopP()) - .maxRetries(chatModel.getMaxRetries()) - .timeout(Duration.ofSeconds(chatModel.getTimeout())) - .build(); - } - - // TODO: Currently gives an error in regards to content-type - @Override - public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel chatModel) { - return LocalAiStreamingChatModel.builder() - .baseUrl(DevoxxGenieStateService.getInstance().getExoModelUrl()) - .modelName(chatModel.getModelName()) - .temperature(chatModel.getTemperature()) - .topP(chatModel.getTopP()) - 
.timeout(Duration.ofSeconds(chatModel.getTimeout())) - .build(); - } - - /** - * Get the models for Exo - * - * @return List of model names - */ - @Override - public List getModels() { - - // 'llama-3-8b', 'llama-3.1-8b', 'llama-3.1-70b', 'llama-3.1-405b', 'llama-3-70b'] - - LanguageModel model2 = LanguageModel.builder() - .modelName("llama-3.1-405b") - .displayName("Llama 3.1 405B") - .apiKeyUsed(false) - .provider(MODEL_PROVIDER) - .outputCost(0) - .inputCost(0) - .contextWindow(131_000) - .build(); - - LanguageModel model1 = LanguageModel.builder() - .modelName("llama-3.1-8b") - .displayName("Llama 3.1 8B") - .apiKeyUsed(false) - .provider(MODEL_PROVIDER) - .outputCost(0) - .inputCost(0) - .contextWindow(8_000) - .build(); - - LanguageModel model3 = LanguageModel.builder() - .modelName("llama-3.1-70b") - .displayName("Llama 3.1 70B") - .apiKeyUsed(false) - .provider(MODEL_PROVIDER) - .outputCost(0) - .inputCost(0) - .contextWindow(131_000) - .build(); - - LanguageModel model4 = LanguageModel.builder() - .modelName("llama-3-8b") - .displayName("Llama 3 8B") - .apiKeyUsed(false) - .provider(MODEL_PROVIDER) - .outputCost(0) - .inputCost(0) - .contextWindow(8_000) - .build(); - - // mistral-nemo - LanguageModel model5 = LanguageModel.builder() - .modelName("mistral-nemo") - .displayName("Mistral Nemo") - .apiKeyUsed(false) - .provider(MODEL_PROVIDER) - .outputCost(0) - .inputCost(0) - .contextWindow(8_000) - .build(); - - // mistral-large - LanguageModel model6 = LanguageModel.builder() - .modelName("mistral-large") - .displayName("Mistral Large") - .apiKeyUsed(false) - .provider(MODEL_PROVIDER) - .outputCost(0) - .inputCost(0) - .contextWindow(8_000) - .build(); - - // deepseek-coder-v2-lite - LanguageModel model7 = LanguageModel.builder() - .modelName("deepseek-coder-v2-lite") - .displayName("Deepseek Coder V2 Lite") - .apiKeyUsed(false) - .provider(MODEL_PROVIDER) - .outputCost(0) - .inputCost(0) - .contextWindow(8_000) - .build(); - - // llava-1.5-7b-hf - 
LanguageModel model8 = LanguageModel.builder() - .modelName("llava-1.5-7b-hf") - .displayName("Llava 1.5 7B HF") - .apiKeyUsed(false) - .provider(MODEL_PROVIDER) - .outputCost(0) - .inputCost(0) - .contextWindow(8_000) - .build(); - - List modelNames = new ArrayList<>(); - modelNames.add(model1); - modelNames.add(model2); - modelNames.add(model3); - modelNames.add(model4); - modelNames.add(model5); - modelNames.add(model6); - modelNames.add(model7); - modelNames.add(model8); - - return modelNames; - } -} - diff --git a/src/main/java/com/devoxx/genie/chatmodel/openai/OpenAIChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/openai/OpenAIChatModelFactory.java index 58e0728c..3f8e56e5 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/openai/OpenAIChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/openai/OpenAIChatModelFactory.java @@ -35,8 +35,8 @@ public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) { builder.baseUrl(DevoxxGenieStateService.getInstance().getCustomOpenAIUrl()); } - if (Strings.isNotBlank(DevoxxGenieStateService.getInstance().getCustomOpenAIModel())) { - builder.modelName(DevoxxGenieStateService.getInstance().getCustomOpenAIModel()); + if (Strings.isNotBlank(DevoxxGenieStateService.getInstance().getCustomOpenAIModelName())) { + builder.modelName(DevoxxGenieStateService.getInstance().getCustomOpenAIModelName()); } return builder.build(); @@ -56,8 +56,8 @@ public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel ch builder.baseUrl(DevoxxGenieStateService.getInstance().getCustomOpenAIUrl()); } - if (Strings.isNotBlank(DevoxxGenieStateService.getInstance().getCustomOpenAIModel())) { - builder.modelName(DevoxxGenieStateService.getInstance().getCustomOpenAIModel()); + if (Strings.isNotBlank(DevoxxGenieStateService.getInstance().getCustomOpenAIModelName())) { + builder.modelName(DevoxxGenieStateService.getInstance().getCustomOpenAIModelName()); } return builder.build(); diff --git 
a/src/main/java/com/devoxx/genie/controller/ActionButtonsPanelController.java b/src/main/java/com/devoxx/genie/controller/ActionButtonsPanelController.java index 8da8b929..e49c2587 100644 --- a/src/main/java/com/devoxx/genie/controller/ActionButtonsPanelController.java +++ b/src/main/java/com/devoxx/genie/controller/ActionButtonsPanelController.java @@ -129,9 +129,8 @@ private LanguageModel createDefaultLanguageModel(@NotNull DevoxxGenieSettingsSer ModelProvider selectedProvider = (ModelProvider) modelProviderComboBox.getSelectedItem(); if (selectedProvider != null && (selectedProvider.equals(LMStudio) || - selectedProvider.equals(GPT4All) || - selectedProvider.equals(Jlama) || - selectedProvider.equals(LLaMA))) { + selectedProvider.equals(GPT4All) || + selectedProvider.equals(LLaMA))) { return LanguageModel.builder() .provider(selectedProvider) .apiKeyUsed(false) diff --git a/src/main/java/com/devoxx/genie/model/Constant.java b/src/main/java/com/devoxx/genie/model/Constant.java index ce070759..bcfab66e 100644 --- a/src/main/java/com/devoxx/genie/model/Constant.java +++ b/src/main/java/com/devoxx/genie/model/Constant.java @@ -36,9 +36,7 @@ private Constant() { public static final String LMSTUDIO_MODEL_URL = "http://localhost:1234/v1/"; public static final String GPT4ALL_MODEL_URL = "http://localhost:4891/v1/"; public static final String JAN_MODEL_URL = "http://localhost:1337/v1/"; - public static final String EXO_MODEL_URL = "http://localhost:8000/v1/"; public static final String LLAMA_CPP_MODEL_URL = "http://localhost:8080"; - public static final String JLAMA_MODEL_URL = "http://localhost:8080/"; // ActionCommands public static final String SUBMIT_ACTION = "submit"; diff --git a/src/main/java/com/devoxx/genie/ui/panel/LlmProviderPanel.java b/src/main/java/com/devoxx/genie/ui/panel/LlmProviderPanel.java index 293bf5d2..401ee5f2 100644 --- a/src/main/java/com/devoxx/genie/ui/panel/LlmProviderPanel.java +++ 
b/src/main/java/com/devoxx/genie/ui/panel/LlmProviderPanel.java @@ -54,7 +54,7 @@ public class LlmProviderPanel extends JBPanel implements LLMSe /** * The conversation panel constructor. * - * @param project the project instance + * @param project the project instance */ public LlmProviderPanel(@NotNull Project project) { super(new BorderLayout()); @@ -121,10 +121,8 @@ public void addModelProvidersToComboBox() { case LMStudio -> stateService.isLmStudioEnabled(); case GPT4All -> stateService.isGpt4AllEnabled(); case Jan -> stateService.isJanEnabled(); - case Exo -> stateService.isExoEnabled(); case LLaMA -> stateService.isLlamaCPPEnabled(); - case Jlama -> stateService.isJlamaEnabled(); - case CustomOpenAI -> stateService.isCustomOpenAIEnabled(); + case CustomOpenAI -> stateService.isCustomOpenAIUrlEnabled(); case OpenAI -> stateService.isOpenAIEnabled(); case Mistral -> stateService.isMistralEnabled(); case Anthropic -> stateService.isAnthropicEnabled(); @@ -150,9 +148,9 @@ private void refreshModels() { } if (selectedProvider == ModelProvider.LMStudio || - selectedProvider == ModelProvider.Ollama || - selectedProvider == ModelProvider.Jan || - selectedProvider == ModelProvider.GPT4All) { + selectedProvider == ModelProvider.Ollama || + selectedProvider == ModelProvider.Jan || + selectedProvider == ModelProvider.GPT4All) { ApplicationManager.getApplication().invokeLater(() -> { refreshButton.setEnabled(false); @@ -168,7 +166,8 @@ private void refreshModels() { }); } else { - NotificationUtil.sendNotification(project, "Model refresh is only available for LMStudio, Ollama and Jan providers."); + NotificationUtil.sendNotification(project, + "Model refresh is only available for LMStudio, Ollama, GPT4All and Jan providers."); } } @@ -287,12 +286,14 @@ private void handleModelProviderSelectionChange(@NotNull ActionEvent e) { isUpdatingModelNames = true; try { + DevoxxGenieStateService stateInstance = DevoxxGenieStateService.getInstance(); JComboBox comboBox = (JComboBox) 
e.getSource(); ModelProvider modelProvider = (ModelProvider) comboBox.getSelectedItem(); if (modelProvider != null) { - DevoxxGenieStateService.getInstance().setSelectedProvider(project.getLocationHash(), modelProvider.getName()); + stateInstance.setSelectedProvider(project.getLocationHash(), modelProvider.getName()); updateModelNamesComboBox(modelProvider.getName()); + modelNameComboBox.setRenderer(new ModelInfoRenderer()); modelNameComboBox.revalidate(); modelNameComboBox.repaint(); @@ -301,6 +302,4 @@ private void handleModelProviderSelectionChange(@NotNull ActionEvent e) { isUpdatingModelNames = false; } } - - } diff --git a/src/main/java/com/devoxx/genie/ui/settings/DevoxxGenieStateService.java b/src/main/java/com/devoxx/genie/ui/settings/DevoxxGenieStateService.java index 6cb5b4f2..63eca67c 100644 --- a/src/main/java/com/devoxx/genie/ui/settings/DevoxxGenieStateService.java +++ b/src/main/java/com/devoxx/genie/ui/settings/DevoxxGenieStateService.java @@ -32,6 +32,7 @@ public static DevoxxGenieStateService getInstance() { return ApplicationManager.getApplication().getService(DevoxxGenieStateService.class); } + // Default excluded files for scan project private List excludedFiles = new ArrayList<>(Arrays.asList( "package-lock.json", "yarn.lock", "pom.xml", "build.gradle", "settings.gradle" )); @@ -73,22 +74,22 @@ public static DevoxxGenieStateService getInstance() { private String lmstudioModelUrl = LMSTUDIO_MODEL_URL; private String gpt4allModelUrl = GPT4ALL_MODEL_URL; private String janModelUrl = JAN_MODEL_URL; - private String exoModelUrl = EXO_MODEL_URL; private String llamaCPPUrl = LLAMA_CPP_MODEL_URL; - private String jlamaUrl = JLAMA_MODEL_URL; + + // Local custom OpenAI-compliant LLM fields private String customOpenAIUrl = ""; - private String customOpenAIModel = ""; + private String customOpenAIModelName = ""; // Local LLM Providers private boolean isOllamaEnabled = true; private boolean isLmStudioEnabled = true; private boolean isGpt4AllEnabled = 
true; private boolean isJanEnabled = true; - private boolean isExoEnabled = true; private boolean isLlamaCPPEnabled = true; - private boolean isJlamaEnabled = true; - private boolean isCustomOpenAIEnabled = false; - private boolean isCustomOpenAIModelEnabled = false; + + // Local custom OpenAI-compliant LLM fields + private boolean isCustomOpenAIUrlEnabled = false; + private boolean isCustomOpenAIModelNameEnabled = false; // Remote LLM Providers private boolean isOpenAIEnabled = false; @@ -99,7 +100,6 @@ public static DevoxxGenieStateService getInstance() { private boolean isGoogleEnabled = false; private boolean isDeepSeekEnabled = false; private boolean isOpenRouterEnabled = false; - private boolean isAzureOpenAIEnabled = false; // LLM API Keys private String openAIKey = ""; diff --git a/src/main/java/com/devoxx/genie/ui/settings/llm/LLMProvidersComponent.java b/src/main/java/com/devoxx/genie/ui/settings/llm/LLMProvidersComponent.java index 84215f04..23067ac5 100644 --- a/src/main/java/com/devoxx/genie/ui/settings/llm/LLMProvidersComponent.java +++ b/src/main/java/com/devoxx/genie/ui/settings/llm/LLMProvidersComponent.java @@ -24,15 +24,11 @@ public class LLMProvidersComponent extends AbstractSettingsComponent { @Getter private final JTextField janModelUrlField = new JTextField(stateService.getJanModelUrl()); @Getter - private final JTextField exoModelUrlField = new JTextField(stateService.getExoModelUrl()); - @Getter private final JTextField llamaCPPModelUrlField = new JTextField(stateService.getLlamaCPPUrl()); @Getter - private final JTextField jlamaModelUrlField = new JTextField(stateService.getJlamaUrl()); - @Getter private final JTextField customOpenAIUrlField = new JTextField(stateService.getCustomOpenAIUrl()); @Getter - private final JTextField customOpenAIModelField = new JTextField(stateService.getCustomOpenAIModel()); + private final JTextField customOpenAIModelNameField = new JTextField(stateService.getCustomOpenAIModelName()); @Getter private final 
JPasswordField openAIKeyField = new JPasswordField(stateService.getOpenAIKey()); @Getter @@ -67,16 +63,11 @@ public class LLMProvidersComponent extends AbstractSettingsComponent { @Getter private final JCheckBox janEnabledCheckBox = new JCheckBox("", stateService.isJanEnabled()); @Getter - private final JCheckBox exoEnabledCheckBox = new JCheckBox("", stateService.isExoEnabled()); - @Getter private final JCheckBox llamaCPPEnabledCheckBox = new JCheckBox("", stateService.isLlamaCPPEnabled()); @Getter - private final JCheckBox jlamaEnabledCheckBox = new JCheckBox("", stateService.isJlamaEnabled()); + private final JCheckBox customOpenAIUrlEnabledCheckBox = new JCheckBox("", stateService.isCustomOpenAIUrlEnabled()); @Getter - private final JCheckBox customOpenAIEnabledCheckBox = new JCheckBox("", stateService.isCustomOpenAIEnabled()); - @Getter - private final JCheckBox customOpenAIModelEnabledCheckBox = new JCheckBox("", stateService.isCustomOpenAIModelEnabled()); - + private final JCheckBox customOpenAIModelNameEnabledCheckBox = new JCheckBox("", stateService.isCustomOpenAIModelNameEnabled()); @Getter private final JCheckBox openAIEnabledCheckBox = new JCheckBox("", stateService.isOpenAIEnabled()); @Getter @@ -126,14 +117,10 @@ public JPanel createPanel() { createTextWithLinkButton(gpt4AllModelUrlField, "https://gpt4all.io/")); addProviderSettingRow(panel, gbc, "Jan URL", janEnabledCheckBox, createTextWithLinkButton(janModelUrlField, "https://jan.ai/download")); - addProviderSettingRow(panel, gbc, "Exo URL", exoEnabledCheckBox, - createTextWithLinkButton(exoModelUrlField, "https://github.com/exo-explore/exo")); addProviderSettingRow(panel, gbc, "LLaMA.c++ URL", llamaCPPEnabledCheckBox, createTextWithLinkButton(llamaCPPModelUrlField, "https://github.com/ggerganov/llama.cpp/blob/master/examples/server/README.md")); - addProviderSettingRow(panel, gbc, "JLama URL", jlamaEnabledCheckBox, - createTextWithLinkButton(jlamaModelUrlField, "https://github.com/tjake/Jlama")); - 
addProviderSettingRow(panel, gbc, "Custom OpenAI URL", customOpenAIEnabledCheckBox, customOpenAIUrlField); - addProviderSettingRow(panel, gbc, "Custom OpenAI Model", customOpenAIModelEnabledCheckBox, customOpenAIModelField); + addProviderSettingRow(panel, gbc, "Custom OpenAI URL", customOpenAIUrlEnabledCheckBox, customOpenAIUrlField); + addProviderSettingRow(panel, gbc, "Custom OpenAI Model", customOpenAIModelNameEnabledCheckBox, customOpenAIModelNameField); // Cloud LLM Providers section addSection(panel, gbc, "Cloud LLM Providers"); @@ -181,10 +168,10 @@ public void addListeners() { lmStudioEnabledCheckBox.addItemListener(e -> updateUrlFieldState(lmStudioEnabledCheckBox, lmStudioModelUrlField)); gpt4AllEnabledCheckBox.addItemListener(e -> updateUrlFieldState(gpt4AllEnabledCheckBox, gpt4AllModelUrlField)); janEnabledCheckBox.addItemListener(e -> updateUrlFieldState(janEnabledCheckBox, janModelUrlField)); - exoEnabledCheckBox.addItemListener(e -> updateUrlFieldState(exoEnabledCheckBox, exoModelUrlField)); llamaCPPEnabledCheckBox.addItemListener(e -> updateUrlFieldState(llamaCPPEnabledCheckBox, llamaCPPModelUrlField)); - jlamaEnabledCheckBox.addItemListener(e -> updateUrlFieldState(jlamaEnabledCheckBox, jlamaModelUrlField)); - customOpenAIEnabledCheckBox.addItemListener(e -> updateUrlFieldState(customOpenAIEnabledCheckBox, customOpenAIUrlField)); + + customOpenAIUrlEnabledCheckBox.addItemListener(e -> updateUrlFieldState(customOpenAIUrlEnabledCheckBox, customOpenAIUrlField)); + customOpenAIModelNameEnabledCheckBox.addItemListener(e -> updateUrlFieldState(customOpenAIModelNameEnabledCheckBox, customOpenAIModelNameField)); openAIEnabledCheckBox.addItemListener(e -> updateUrlFieldState(openAIEnabledCheckBox, openAIKeyField)); mistralEnabledCheckBox.addItemListener(e -> updateUrlFieldState(mistralEnabledCheckBox, mistralApiKeyField)); @@ -197,12 +184,12 @@ public void addListeners() { enableAzureOpenAICheckBox.addItemListener(e -> 
updateUrlFieldState(enableAzureOpenAICheckBox, azureOpenAIEndpointField)); } - // In LLMProvidersComponent.java - private boolean isAzureConfigValid() { - return !azureOpenAIKeyField.getPassword().toString().isEmpty() - && !azureOpenAIEndpointField.getText().trim().isEmpty() - && !azureOpenAIDeploymentField.getText().trim().isEmpty(); - } +// // In LLMProvidersComponent.java +// private boolean isAzureConfigValid() { +// return !azureOpenAIKeyField.getPassword().toString().isEmpty() +// && !azureOpenAIEndpointField.getText().trim().isEmpty() +// && !azureOpenAIDeploymentField.getText().trim().isEmpty(); +// } private void addAzureOpenAIPanel(JPanel panel, GridBagConstraints gbc) { addSettingRow(panel, gbc, "Enable Azure OpenAI Provider", enableAzureOpenAICheckBox); diff --git a/src/main/java/com/devoxx/genie/ui/settings/llm/LLMProvidersConfigurable.java b/src/main/java/com/devoxx/genie/ui/settings/llm/LLMProvidersConfigurable.java index 881f881a..ffc1b40d 100644 --- a/src/main/java/com/devoxx/genie/ui/settings/llm/LLMProvidersConfigurable.java +++ b/src/main/java/com/devoxx/genie/ui/settings/llm/LLMProvidersConfigurable.java @@ -62,16 +62,15 @@ public boolean isModified() { isModified |= isFieldModified(llmSettingsComponent.getGeminiApiKeyField(), stateService.getGeminiKey()); isModified |= isFieldModified(llmSettingsComponent.getDeepSeekApiKeyField(), stateService.getDeepSeekKey()); isModified |= isFieldModified(llmSettingsComponent.getLlamaCPPModelUrlField(), stateService.getLlamaCPPUrl()); - isModified |= isFieldModified(llmSettingsComponent.getJlamaModelUrlField(), stateService.getJlamaUrl()); isModified |= isFieldModified(llmSettingsComponent.getOpenRouterApiKeyField(), stateService.getOpenRouterKey()); isModified |= isFieldModified(llmSettingsComponent.getOllamaModelUrlField(), stateService.getOllamaModelUrl()); isModified |= isFieldModified(llmSettingsComponent.getLmStudioModelUrlField(), stateService.getLmstudioModelUrl()); isModified |= 
isFieldModified(llmSettingsComponent.getGpt4AllModelUrlField(), stateService.getGpt4allModelUrl()); isModified |= isFieldModified(llmSettingsComponent.getJanModelUrlField(), stateService.getJanModelUrl()); - isModified |= isFieldModified(llmSettingsComponent.getExoModelUrlField(), stateService.getExoModelUrl()); + isModified |= isFieldModified(llmSettingsComponent.getCustomOpenAIUrlField(), stateService.getCustomOpenAIUrl()); - isModified |= isFieldModified(llmSettingsComponent.getCustomOpenAIModelField(), stateService.getCustomOpenAIModel()); + isModified |= isFieldModified(llmSettingsComponent.getCustomOpenAIModelNameField(), stateService.getCustomOpenAIModelName()); isModified |= !stateService.getShowAzureOpenAIFields().equals(llmSettingsComponent.getEnableAzureOpenAICheckBox().isSelected()); isModified |= isFieldModified(llmSettingsComponent.getAzureOpenAIEndpointField(), stateService.getAzureOpenAIEndpoint()); @@ -82,10 +81,10 @@ public boolean isModified() { isModified |= stateService.isLmStudioEnabled() != llmSettingsComponent.getLmStudioEnabledCheckBox().isSelected(); isModified |= stateService.isGpt4AllEnabled() != llmSettingsComponent.getGpt4AllEnabledCheckBox().isSelected(); isModified |= stateService.isJanEnabled() != llmSettingsComponent.getJanEnabledCheckBox().isSelected(); - isModified |= stateService.isExoEnabled() != llmSettingsComponent.getExoEnabledCheckBox().isSelected(); isModified |= stateService.isLlamaCPPEnabled() != llmSettingsComponent.getLlamaCPPEnabledCheckBox().isSelected(); - isModified |= stateService.isJlamaEnabled() != llmSettingsComponent.getJlamaEnabledCheckBox().isSelected(); - isModified |= stateService.isCustomOpenAIEnabled() != llmSettingsComponent.getCustomOpenAIEnabledCheckBox().isSelected(); + + isModified |= stateService.isCustomOpenAIUrlEnabled() != llmSettingsComponent.getCustomOpenAIUrlEnabledCheckBox().isSelected(); + isModified |= stateService.isCustomOpenAIModelNameEnabled() != 
llmSettingsComponent.getCustomOpenAIModelNameEnabledCheckBox().isSelected(); isModified |= stateService.isOpenAIEnabled() != llmSettingsComponent.getOpenAIEnabledCheckBox().isSelected(); isModified |= stateService.isMistralEnabled() != llmSettingsComponent.getMistralEnabledCheckBox().isSelected(); @@ -115,11 +114,10 @@ public void apply() { settings.setLmstudioModelUrl(llmSettingsComponent.getLmStudioModelUrlField().getText()); settings.setGpt4allModelUrl(llmSettingsComponent.getGpt4AllModelUrlField().getText()); settings.setJanModelUrl(llmSettingsComponent.getJanModelUrlField().getText()); - settings.setExoModelUrl(llmSettingsComponent.getExoModelUrlField().getText()); settings.setLlamaCPPUrl(llmSettingsComponent.getLlamaCPPModelUrlField().getText()); - settings.setJlamaUrl(llmSettingsComponent.getJlamaModelUrlField().getText()); + settings.setCustomOpenAIUrl(llmSettingsComponent.getCustomOpenAIUrlField().getText()); - settings.setCustomOpenAIModel(llmSettingsComponent.getCustomOpenAIModelField().getText()); + settings.setCustomOpenAIModelName(llmSettingsComponent.getCustomOpenAIModelNameField().getText()); settings.setOpenAIKey(new String(llmSettingsComponent.getOpenAIKeyField().getPassword())); settings.setMistralKey(new String(llmSettingsComponent.getMistralApiKeyField().getPassword())); @@ -139,10 +137,10 @@ public void apply() { settings.setLmStudioEnabled(llmSettingsComponent.getLmStudioEnabledCheckBox().isSelected()); settings.setGpt4AllEnabled(llmSettingsComponent.getGpt4AllEnabledCheckBox().isSelected()); settings.setJanEnabled(llmSettingsComponent.getJanEnabledCheckBox().isSelected()); - settings.setExoEnabled(llmSettingsComponent.getExoEnabledCheckBox().isSelected()); settings.setLlamaCPPEnabled(llmSettingsComponent.getLlamaCPPEnabledCheckBox().isSelected()); - settings.setJlamaEnabled(llmSettingsComponent.getJlamaEnabledCheckBox().isSelected()); - settings.setCustomOpenAIEnabled(llmSettingsComponent.getCustomOpenAIEnabledCheckBox().isSelected()); + + 
settings.setCustomOpenAIUrlEnabled(llmSettingsComponent.getCustomOpenAIUrlEnabledCheckBox().isSelected()); + settings.setCustomOpenAIModelNameEnabled(llmSettingsComponent.getCustomOpenAIModelNameEnabledCheckBox().isSelected()); settings.setOpenAIEnabled(llmSettingsComponent.getOpenAIEnabledCheckBox().isSelected()); settings.setMistralEnabled(llmSettingsComponent.getMistralEnabledCheckBox().isSelected()); @@ -185,11 +183,10 @@ public void reset() { llmSettingsComponent.getLmStudioModelUrlField().setText(settings.getLmstudioModelUrl()); llmSettingsComponent.getGpt4AllModelUrlField().setText(settings.getGpt4allModelUrl()); llmSettingsComponent.getJanModelUrlField().setText(settings.getJanModelUrl()); - llmSettingsComponent.getExoModelUrlField().setText(settings.getExoModelUrl()); llmSettingsComponent.getLlamaCPPModelUrlField().setText(settings.getLlamaCPPUrl()); - llmSettingsComponent.getJlamaModelUrlField().setText(settings.getJlamaUrl()); + llmSettingsComponent.getCustomOpenAIUrlField().setText(settings.getCustomOpenAIUrl()); - llmSettingsComponent.getCustomOpenAIModelField().setText(settings.getCustomOpenAIModel()); + llmSettingsComponent.getCustomOpenAIModelNameField().setText(settings.getCustomOpenAIModelName()); llmSettingsComponent.getOpenAIKeyField().setText(settings.getOpenAIKey()); llmSettingsComponent.getMistralApiKeyField().setText(settings.getMistralKey()); @@ -209,10 +206,10 @@ public void reset() { llmSettingsComponent.getLmStudioEnabledCheckBox().setSelected(settings.isLmStudioEnabled()); llmSettingsComponent.getGpt4AllEnabledCheckBox().setSelected(settings.isGpt4AllEnabled()); llmSettingsComponent.getJanEnabledCheckBox().setSelected(settings.isJanEnabled()); - llmSettingsComponent.getExoEnabledCheckBox().setSelected(settings.isExoEnabled()); llmSettingsComponent.getLlamaCPPEnabledCheckBox().setSelected(settings.isLlamaCPPEnabled()); - llmSettingsComponent.getJlamaEnabledCheckBox().setSelected(settings.isJlamaEnabled()); - 
llmSettingsComponent.getCustomOpenAIEnabledCheckBox().setSelected(settings.isCustomOpenAIEnabled()); + + llmSettingsComponent.getCustomOpenAIUrlEnabledCheckBox().setSelected(settings.isCustomOpenAIUrlEnabled()); + llmSettingsComponent.getCustomOpenAIModelNameEnabledCheckBox().setSelected(settings.isCustomOpenAIModelNameEnabled()); llmSettingsComponent.getOpenAIEnabledCheckBox().setSelected(settings.isOpenAIEnabled()); llmSettingsComponent.getMistralEnabledCheckBox().setSelected(settings.isMistralEnabled()); diff --git a/src/main/resources/META-INF/plugin.xml b/src/main/resources/META-INF/plugin.xml index 52cdd71b..296ba20c 100644 --- a/src/main/resources/META-INF/plugin.xml +++ b/src/main/resources/META-INF/plugin.xml @@ -39,6 +39,8 @@
  • Fix #196 : Continue with prompt if '/' command is unknown
  • Fix #394 : Removed Google Gemini 1.0 Pro
  • +
  • Feat #397 : Add a way to specify custom model name
  • +
  • Feat #400 : List "custom local model" when enabled in dropdown

V0.4.5