diff --git a/core/src/main/java/com/devoxx/genie/model/jan/Data.java b/core/src/main/java/com/devoxx/genie/model/jan/Data.java index 1ff9007c..cd58a2a8 100644 --- a/core/src/main/java/com/devoxx/genie/model/jan/Data.java +++ b/core/src/main/java/com/devoxx/genie/model/jan/Data.java @@ -15,7 +15,7 @@ public class Data { private String object; @JsonProperty("ctx_len") - private Integer ctxLen; + private Long ctxLen; @JsonProperty("max_tokens") private Integer maxTokens; diff --git a/src/main/java/com/devoxx/genie/chatmodel/ChatModelFactoryProvider.java b/src/main/java/com/devoxx/genie/chatmodel/ChatModelFactoryProvider.java index 64dc5823..b9c07eb2 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/ChatModelFactoryProvider.java +++ b/src/main/java/com/devoxx/genie/chatmodel/ChatModelFactoryProvider.java @@ -1,19 +1,20 @@ package com.devoxx.genie.chatmodel; -import com.devoxx.genie.chatmodel.anthropic.AnthropicChatModelFactory; -import com.devoxx.genie.chatmodel.azureopenai.AzureOpenAIChatModelFactory; -import com.devoxx.genie.chatmodel.customopenai.CustomOpenAIChatModelFactory; -import com.devoxx.genie.chatmodel.deepinfra.DeepInfraChatModelFactory; -import com.devoxx.genie.chatmodel.deepseek.DeepSeekChatModelFactory; -import com.devoxx.genie.chatmodel.google.GoogleChatModelFactory; -import com.devoxx.genie.chatmodel.gpt4all.GPT4AllChatModelFactory; -import com.devoxx.genie.chatmodel.groq.GroqChatModelFactory; -import com.devoxx.genie.chatmodel.jan.JanChatModelFactory; -import com.devoxx.genie.chatmodel.lmstudio.LMStudioChatModelFactory; -import com.devoxx.genie.chatmodel.mistral.MistralChatModelFactory; -import com.devoxx.genie.chatmodel.ollama.OllamaChatModelFactory; -import com.devoxx.genie.chatmodel.openai.OpenAIChatModelFactory; -import com.devoxx.genie.chatmodel.openrouter.OpenRouterChatModelFactory; +import com.devoxx.genie.chatmodel.cloud.anthropic.AnthropicChatModelFactory; +import com.devoxx.genie.chatmodel.cloud.azureopenai.AzureOpenAIChatModelFactory; +import com.devoxx.genie.chatmodel.local.customopenai.CustomOpenAIChatModelFactory; +import com.devoxx.genie.chatmodel.cloud.deepinfra.DeepInfraChatModelFactory; +import com.devoxx.genie.chatmodel.cloud.deepseek.DeepSeekChatModelFactory; +import com.devoxx.genie.chatmodel.cloud.google.GoogleChatModelFactory; +import com.devoxx.genie.chatmodel.local.gpt4all.GPT4AllChatModelFactory; +import com.devoxx.genie.chatmodel.cloud.groq.GroqChatModelFactory; +import com.devoxx.genie.chatmodel.local.jan.JanChatModelFactory; +import com.devoxx.genie.chatmodel.local.llamaCPP.LlamaChatModelFactory; +import com.devoxx.genie.chatmodel.local.lmstudio.LMStudioChatModelFactory; +import com.devoxx.genie.chatmodel.cloud.mistral.MistralChatModelFactory; +import com.devoxx.genie.chatmodel.local.ollama.OllamaChatModelFactory; +import com.devoxx.genie.chatmodel.cloud.openai.OpenAIChatModelFactory; +import com.devoxx.genie.chatmodel.cloud.openrouter.OpenRouterChatModelFactory; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -50,6 +51,7 @@ private ChatModelFactoryProvider() { case "Groq" -> new GroqChatModelFactory(); case "GPT4All" -> new GPT4AllChatModelFactory(); case "Jan" -> new JanChatModelFactory(); + case "LLaMA" -> new LlamaChatModelFactory(); case "LMStudio" -> new LMStudioChatModelFactory(); case "Mistral" -> new MistralChatModelFactory(); case "Ollama" -> new OllamaChatModelFactory(); diff --git a/src/main/java/com/devoxx/genie/chatmodel/ChatModelProvider.java 
b/src/main/java/com/devoxx/genie/chatmodel/ChatModelProvider.java index 79ea4b22..9c2076ea 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/ChatModelProvider.java +++ b/src/main/java/com/devoxx/genie/chatmodel/ChatModelProvider.java @@ -1,6 +1,6 @@ package com.devoxx.genie.chatmodel; -import com.devoxx.genie.chatmodel.lmstudio.LMStudioChatModelFactory; +import com.devoxx.genie.chatmodel.local.lmstudio.LMStudioChatModelFactory; import com.devoxx.genie.model.ChatModel; import com.devoxx.genie.model.Constant; import com.devoxx.genie.model.LanguageModel; diff --git a/src/main/java/com/devoxx/genie/chatmodel/LocalChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/LocalChatModelFactory.java index 251b1d20..bca8d13d 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/LocalChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/LocalChatModelFactory.java @@ -25,6 +25,8 @@ public abstract class LocalChatModelFactory implements ChatModelFactory { protected List cachedModels = null; protected static final ExecutorService executorService = Executors.newFixedThreadPool(5); protected static boolean warningShown = false; + protected boolean providerRunning = false; + protected boolean providerChecked = false; protected LocalChatModelFactory(ModelProvider modelProvider) { this.modelProvider = modelProvider; @@ -62,9 +64,18 @@ protected StreamingChatLanguageModel createLocalAiStreamingChatModel(@NotNull Ch @Override public List getModels() { - if (cachedModels != null) { - return cachedModels; + if (!providerChecked) { + checkAndFetchModels(); } + if (!providerRunning) { + NotificationUtil.sendNotification(ProjectManager.getInstance().getDefaultProject(), + "LLM provider is not running. Please start it and try again."); + return List.of(); + } + return cachedModels; + } + + private void checkAndFetchModels() { List modelNames = new ArrayList<>(); List> futures = new ArrayList<>(); try { @@ -77,25 +88,28 @@ public List getModels() { modelNames.add(languageModel); } } catch (IOException e) { - handleModelFetchError(model, e); + handleModelFetchError(e); } }, executorService); futures.add(future); } CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join(); cachedModels = modelNames; + providerRunning = true; } catch (IOException e) { handleGeneralFetchError(e); cachedModels = List.of(); + providerRunning = false; + } finally { + providerChecked = true; } - return cachedModels; } protected abstract Object[] fetchModels() throws IOException; protected abstract LanguageModel buildLanguageModel(Object model) throws IOException; - protected void handleModelFetchError(Object model, @NotNull IOException e) { + protected void handleModelFetchError(@NotNull IOException e) { NotificationUtil.sendNotification(ProjectManager.getInstance().getDefaultProject(), "Error fetching model details: " + e.getMessage()); } @@ -109,5 +123,7 @@ protected void handleGeneralFetchError(IOException e) { @Override public void resetModels() { cachedModels = null; + providerChecked = false; + providerRunning = false; } } \ No newline at end of file diff --git a/src/main/java/com/devoxx/genie/chatmodel/anthropic/AnthropicChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/cloud/anthropic/AnthropicChatModelFactory.java similarity index 96% rename from src/main/java/com/devoxx/genie/chatmodel/anthropic/AnthropicChatModelFactory.java rename to src/main/java/com/devoxx/genie/chatmodel/cloud/anthropic/AnthropicChatModelFactory.java index 037cebc5..b8c10b4e 100644 --- 
a/src/main/java/com/devoxx/genie/chatmodel/anthropic/AnthropicChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/cloud/anthropic/AnthropicChatModelFactory.java @@ -1,4 +1,4 @@ -package com.devoxx.genie.chatmodel.anthropic; +package com.devoxx.genie.chatmodel.cloud.anthropic; import com.devoxx.genie.chatmodel.ChatModelFactory; import com.devoxx.genie.model.ChatModel; diff --git a/src/main/java/com/devoxx/genie/chatmodel/azureopenai/AzureOpenAIChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/cloud/azureopenai/AzureOpenAIChatModelFactory.java similarity index 98% rename from src/main/java/com/devoxx/genie/chatmodel/azureopenai/AzureOpenAIChatModelFactory.java rename to src/main/java/com/devoxx/genie/chatmodel/cloud/azureopenai/AzureOpenAIChatModelFactory.java index b47e5bd8..09843225 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/azureopenai/AzureOpenAIChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/cloud/azureopenai/AzureOpenAIChatModelFactory.java @@ -1,4 +1,4 @@ -package com.devoxx.genie.chatmodel.azureopenai; +package com.devoxx.genie.chatmodel.cloud.azureopenai; import com.devoxx.genie.chatmodel.ChatModelFactory; import com.devoxx.genie.model.ChatModel; diff --git a/src/main/java/com/devoxx/genie/chatmodel/deepinfra/DeepInfraChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/cloud/deepinfra/DeepInfraChatModelFactory.java similarity index 97% rename from src/main/java/com/devoxx/genie/chatmodel/deepinfra/DeepInfraChatModelFactory.java rename to src/main/java/com/devoxx/genie/chatmodel/cloud/deepinfra/DeepInfraChatModelFactory.java index f00ddb57..27ed4fd4 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/deepinfra/DeepInfraChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/cloud/deepinfra/DeepInfraChatModelFactory.java @@ -1,4 +1,4 @@ -package com.devoxx.genie.chatmodel.deepinfra; +package com.devoxx.genie.chatmodel.cloud.deepinfra; import com.devoxx.genie.chatmodel.ChatModelFactory; import com.devoxx.genie.model.ChatModel; diff --git a/src/main/java/com/devoxx/genie/chatmodel/deepseek/DeepSeekChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/cloud/deepseek/DeepSeekChatModelFactory.java similarity index 97% rename from src/main/java/com/devoxx/genie/chatmodel/deepseek/DeepSeekChatModelFactory.java rename to src/main/java/com/devoxx/genie/chatmodel/cloud/deepseek/DeepSeekChatModelFactory.java index fec1eb62..85308a86 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/deepseek/DeepSeekChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/cloud/deepseek/DeepSeekChatModelFactory.java @@ -1,4 +1,4 @@ -package com.devoxx.genie.chatmodel.deepseek; +package com.devoxx.genie.chatmodel.cloud.deepseek; import com.devoxx.genie.chatmodel.ChatModelFactory; import com.devoxx.genie.model.ChatModel; diff --git a/src/main/java/com/devoxx/genie/chatmodel/google/GoogleChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/cloud/google/GoogleChatModelFactory.java similarity index 95% rename from src/main/java/com/devoxx/genie/chatmodel/google/GoogleChatModelFactory.java rename to src/main/java/com/devoxx/genie/chatmodel/cloud/google/GoogleChatModelFactory.java index 671d6657..339f6c72 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/google/GoogleChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/cloud/google/GoogleChatModelFactory.java @@ -1,4 +1,4 @@ -package com.devoxx.genie.chatmodel.google; +package com.devoxx.genie.chatmodel.cloud.google; 
import com.devoxx.genie.chatmodel.ChatModelFactory; import com.devoxx.genie.model.ChatModel; diff --git a/src/main/java/com/devoxx/genie/chatmodel/groq/GroqChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/cloud/groq/GroqChatModelFactory.java similarity index 96% rename from src/main/java/com/devoxx/genie/chatmodel/groq/GroqChatModelFactory.java rename to src/main/java/com/devoxx/genie/chatmodel/cloud/groq/GroqChatModelFactory.java index 16bcc8b2..4d9f864b 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/groq/GroqChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/cloud/groq/GroqChatModelFactory.java @@ -1,4 +1,4 @@ -package com.devoxx.genie.chatmodel.groq; +package com.devoxx.genie.chatmodel.cloud.groq; import com.devoxx.genie.chatmodel.ChatModelFactory; import com.devoxx.genie.model.ChatModel; diff --git a/src/main/java/com/devoxx/genie/chatmodel/mistral/MistralChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/cloud/mistral/MistralChatModelFactory.java similarity index 97% rename from src/main/java/com/devoxx/genie/chatmodel/mistral/MistralChatModelFactory.java rename to src/main/java/com/devoxx/genie/chatmodel/cloud/mistral/MistralChatModelFactory.java index a4593a17..b9260750 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/mistral/MistralChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/cloud/mistral/MistralChatModelFactory.java @@ -1,4 +1,4 @@ -package com.devoxx.genie.chatmodel.mistral; +package com.devoxx.genie.chatmodel.cloud.mistral; import com.devoxx.genie.chatmodel.ChatModelFactory; import com.devoxx.genie.model.ChatModel; diff --git a/src/main/java/com/devoxx/genie/chatmodel/openai/OpenAIChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/cloud/openai/OpenAIChatModelFactory.java similarity index 98% rename from src/main/java/com/devoxx/genie/chatmodel/openai/OpenAIChatModelFactory.java rename to src/main/java/com/devoxx/genie/chatmodel/cloud/openai/OpenAIChatModelFactory.java index 3f8e56e5..b3a455fd 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/openai/OpenAIChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/cloud/openai/OpenAIChatModelFactory.java @@ -1,4 +1,4 @@ -package com.devoxx.genie.chatmodel.openai; +package com.devoxx.genie.chatmodel.cloud.openai; import com.devoxx.genie.chatmodel.ChatModelFactory; import com.devoxx.genie.model.ChatModel; diff --git a/src/main/java/com/devoxx/genie/chatmodel/openrouter/OpenRouterChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/cloud/openrouter/OpenRouterChatModelFactory.java similarity index 98% rename from src/main/java/com/devoxx/genie/chatmodel/openrouter/OpenRouterChatModelFactory.java rename to src/main/java/com/devoxx/genie/chatmodel/cloud/openrouter/OpenRouterChatModelFactory.java index 10ec5035..9e7b7262 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/openrouter/OpenRouterChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/cloud/openrouter/OpenRouterChatModelFactory.java @@ -1,4 +1,4 @@ -package com.devoxx.genie.chatmodel.openrouter; +package com.devoxx.genie.chatmodel.cloud.openrouter; import com.devoxx.genie.chatmodel.ChatModelFactory; import com.devoxx.genie.model.ChatModel; diff --git a/src/main/java/com/devoxx/genie/chatmodel/local/LocalLLMProvider.java b/src/main/java/com/devoxx/genie/chatmodel/local/LocalLLMProvider.java new file mode 100644 index 00000000..664be702 --- /dev/null +++ b/src/main/java/com/devoxx/genie/chatmodel/local/LocalLLMProvider.java @@ -0,0 +1,7 @@ 
+package com.devoxx.genie.chatmodel.local; + +import java.io.IOException; + +public interface LocalLLMProvider { + Object getModels() throws IOException; +} diff --git a/src/main/java/com/devoxx/genie/chatmodel/local/LocalLLMProviderUtil.java b/src/main/java/com/devoxx/genie/chatmodel/local/LocalLLMProviderUtil.java new file mode 100644 index 00000000..f6ef3f38 --- /dev/null +++ b/src/main/java/com/devoxx/genie/chatmodel/local/LocalLLMProviderUtil.java @@ -0,0 +1,54 @@ +package com.devoxx.genie.chatmodel.local; + +import com.devoxx.genie.model.lmstudio.LMStudioModelEntryDTO; +import com.devoxx.genie.service.exception.UnsuccessfulRequestException; +import com.devoxx.genie.ui.settings.DevoxxGenieStateService; +import com.google.gson.Gson; +import com.google.gson.JsonElement; +import okhttp3.OkHttpClient; +import okhttp3.Request; +import okhttp3.Response; + +import java.io.IOException; +import java.util.Objects; + +import static com.devoxx.genie.util.HttpUtil.ensureEndsWithSlash; + +public class LocalLLMProviderUtil { + + private static final OkHttpClient client = new OkHttpClient(); + private static final Gson gson = new Gson(); + + public static T getModels(String baseUrlConfigKey, String endpoint, Class responseType) throws IOException { + String configValue = DevoxxGenieStateService.getInstance().getConfigValue(baseUrlConfigKey); + String baseUrl = ensureEndsWithSlash(Objects.requireNonNull(configValue)); + + Request request = new Request.Builder() + .url(baseUrl + endpoint) + .build(); + + try (Response response = client.newCall(request).execute()) { + if (!response.isSuccessful()) { + throw new UnsuccessfulRequestException("Unexpected code " + response); + } + + if (response.body() == null) { + throw new UnsuccessfulRequestException("Response body is null"); + } + + String json = response.body().string(); + + // Special handling for LM Studio + if (responseType.equals(LMStudioModelEntryDTO[].class)) { + JsonElement jsonElement = gson.fromJson(json, JsonElement.class); + if (jsonElement.isJsonObject() && jsonElement.getAsJsonObject().has("data")) { + return gson.fromJson(jsonElement.getAsJsonObject().get("data"), responseType); + } else { + return responseType.cast(new LMStudioModelEntryDTO[0]); + } + } + + return gson.fromJson(json, responseType); + } + } +} diff --git a/src/main/java/com/devoxx/genie/chatmodel/customopenai/CustomOpenAIChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/local/customopenai/CustomOpenAIChatModelFactory.java similarity index 97% rename from src/main/java/com/devoxx/genie/chatmodel/customopenai/CustomOpenAIChatModelFactory.java rename to src/main/java/com/devoxx/genie/chatmodel/local/customopenai/CustomOpenAIChatModelFactory.java index 984d37c2..a33ca71c 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/customopenai/CustomOpenAIChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/local/customopenai/CustomOpenAIChatModelFactory.java @@ -1,4 +1,4 @@ -package com.devoxx.genie.chatmodel.customopenai; +package com.devoxx.genie.chatmodel.local.customopenai; import com.devoxx.genie.chatmodel.ChatModelFactory; import com.devoxx.genie.model.ChatModel; diff --git a/src/main/java/com/devoxx/genie/chatmodel/gpt4all/GPT4AllChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/local/gpt4all/GPT4AllChatModelFactory.java similarity index 84% rename from src/main/java/com/devoxx/genie/chatmodel/gpt4all/GPT4AllChatModelFactory.java rename to src/main/java/com/devoxx/genie/chatmodel/local/gpt4all/GPT4AllChatModelFactory.java index 
3b19cf64..aef0ea91 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/gpt4all/GPT4AllChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/local/gpt4all/GPT4AllChatModelFactory.java @@ -1,11 +1,10 @@ -package com.devoxx.genie.chatmodel.gpt4all; +package com.devoxx.genie.chatmodel.local.gpt4all; import com.devoxx.genie.chatmodel.LocalChatModelFactory; import com.devoxx.genie.model.ChatModel; import com.devoxx.genie.model.LanguageModel; import com.devoxx.genie.model.enumarations.ModelProvider; import com.devoxx.genie.model.gpt4all.Model; -import com.devoxx.genie.service.gpt4all.GPT4AllService; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.chat.StreamingChatLanguageModel; @@ -36,20 +35,19 @@ protected String getModelUrl() { @Override protected Model[] fetchModels() throws IOException { - return GPT4AllService.getInstance().getModels().toArray(new Model[0]); + return GPT4AllModelService.getInstance().getModels().toArray(new Model[0]); } @Override protected LanguageModel buildLanguageModel(Object model) { Model gpt4AllModel = (Model) model; - // int contextWindow = GPT4AllService.getInstance() return LanguageModel.builder() .provider(modelProvider) .modelName(gpt4AllModel.getId()) .displayName(gpt4AllModel.getId()) .inputCost(0) .outputCost(0) - // .contextWindow(contextWindow) + // .contextWindow(contextWindow) // GPT4All does not provide context window :( .apiKeyUsed(false) .build(); } diff --git a/src/main/java/com/devoxx/genie/chatmodel/local/gpt4all/GPT4AllModelService.java b/src/main/java/com/devoxx/genie/chatmodel/local/gpt4all/GPT4AllModelService.java new file mode 100644 index 00000000..6d51c935 --- /dev/null +++ b/src/main/java/com/devoxx/genie/chatmodel/local/gpt4all/GPT4AllModelService.java @@ -0,0 +1,26 @@ +package com.devoxx.genie.chatmodel.local.gpt4all; + +import com.devoxx.genie.chatmodel.local.LocalLLMProvider; +import com.devoxx.genie.chatmodel.local.LocalLLMProviderUtil; +import com.devoxx.genie.model.gpt4all.Model; +import com.devoxx.genie.model.gpt4all.ResponseDTO; +import com.intellij.openapi.application.ApplicationManager; +import org.jetbrains.annotations.NotNull; + +import java.io.IOException; +import java.util.List; + +public class GPT4AllModelService implements LocalLLMProvider { + + @NotNull + public static GPT4AllModelService getInstance() { + return ApplicationManager.getApplication().getService(GPT4AllModelService.class); + } + + @Override + public List getModels() throws IOException { + return LocalLLMProviderUtil + .getModels("gpt4allModelUrl", "models", ResponseDTO.class) + .getData(); + } +} diff --git a/src/main/java/com/devoxx/genie/chatmodel/jan/JanChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/local/jan/JanChatModelFactory.java similarity index 91% rename from src/main/java/com/devoxx/genie/chatmodel/jan/JanChatModelFactory.java rename to src/main/java/com/devoxx/genie/chatmodel/local/jan/JanChatModelFactory.java index f65c55b4..af0fe471 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/jan/JanChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/local/jan/JanChatModelFactory.java @@ -1,11 +1,10 @@ -package com.devoxx.genie.chatmodel.jan; +package com.devoxx.genie.chatmodel.local.jan; import com.devoxx.genie.chatmodel.LocalChatModelFactory; import com.devoxx.genie.model.ChatModel; import com.devoxx.genie.model.LanguageModel; import com.devoxx.genie.model.enumarations.ModelProvider; import 
com.devoxx.genie.model.jan.Data; -import com.devoxx.genie.service.jan.JanService; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.chat.StreamingChatLanguageModel; @@ -36,7 +35,7 @@ protected String getModelUrl() { @Override protected Data[] fetchModels() throws IOException { - return JanService.getInstance().getModels().toArray(new Data[0]); + return JanModelService.getInstance().getModels().toArray(new Data[0]); } @Override diff --git a/src/main/java/com/devoxx/genie/chatmodel/local/jan/JanModelService.java b/src/main/java/com/devoxx/genie/chatmodel/local/jan/JanModelService.java new file mode 100644 index 00000000..7f756b43 --- /dev/null +++ b/src/main/java/com/devoxx/genie/chatmodel/local/jan/JanModelService.java @@ -0,0 +1,26 @@ +package com.devoxx.genie.chatmodel.local.jan; + +import com.devoxx.genie.chatmodel.local.LocalLLMProvider; +import com.devoxx.genie.chatmodel.local.LocalLLMProviderUtil; +import com.devoxx.genie.model.jan.Data; +import com.devoxx.genie.model.jan.ResponseDTO; +import com.intellij.openapi.application.ApplicationManager; +import org.jetbrains.annotations.NotNull; + +import java.io.IOException; +import java.util.List; + +public class JanModelService implements LocalLLMProvider { + + @NotNull + public static JanModelService getInstance() { + return ApplicationManager.getApplication().getService(JanModelService.class); + } + + @Override + public List getModels() throws IOException { + return LocalLLMProviderUtil + .getModels("janModelUrl", "models", ResponseDTO.class) + .getData(); + } +} diff --git a/src/main/java/com/devoxx/genie/chatmodel/local/llamaCPP/LlamaChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/local/llamaCPP/LlamaChatModelFactory.java new file mode 100644 index 00000000..b7dba021 --- /dev/null +++ b/src/main/java/com/devoxx/genie/chatmodel/local/llamaCPP/LlamaChatModelFactory.java @@ -0,0 +1,47 @@ +package com.devoxx.genie.chatmodel.local.llamaCPP; + +import com.devoxx.genie.chatmodel.ChatModelFactory; +import com.devoxx.genie.model.ChatModel; +import com.devoxx.genie.model.LanguageModel; +import com.devoxx.genie.model.enumarations.ModelProvider; +import com.devoxx.genie.ui.settings.DevoxxGenieStateService; +import dev.langchain4j.model.chat.ChatLanguageModel; +import dev.langchain4j.model.localai.LocalAiChatModel; +import org.jetbrains.annotations.NotNull; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.List; + +public class LlamaChatModelFactory implements ChatModelFactory { + + @Override + public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) { + return LocalAiChatModel.builder() + .baseUrl(DevoxxGenieStateService.getInstance().getLlamaCPPUrl()) + .modelName(chatModel.getModelName()) + .temperature(chatModel.getTemperature()) + .topP(chatModel.getTopP()) + .maxRetries(chatModel.getMaxRetries()) + .timeout(Duration.ofSeconds(chatModel.getTimeout())) + .build(); + } + + @Override + public List getModels() { + LanguageModel lmStudio = LanguageModel.builder() + .provider(ModelProvider.LLaMA) + .modelName(TEST_MODEL) + .displayName(TEST_MODEL) + .inputCost(0) + .outputCost(0) + .contextWindow(8000) + .apiKeyUsed(false) + .build(); + + List modelNames = new ArrayList<>(); + modelNames.add(lmStudio); + return modelNames; + } +} + diff --git a/src/main/java/com/devoxx/genie/chatmodel/lmstudio/LMStudioChatModel.java b/src/main/java/com/devoxx/genie/chatmodel/local/lmstudio/LMStudioChatModel.java 
similarity index 98% rename from src/main/java/com/devoxx/genie/chatmodel/lmstudio/LMStudioChatModel.java rename to src/main/java/com/devoxx/genie/chatmodel/local/lmstudio/LMStudioChatModel.java index 1da71d85..9246433d 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/lmstudio/LMStudioChatModel.java +++ b/src/main/java/com/devoxx/genie/chatmodel/local/lmstudio/LMStudioChatModel.java @@ -1,4 +1,4 @@ -package com.devoxx.genie.chatmodel.lmstudio; +package com.devoxx.genie.chatmodel.local.lmstudio; import dev.ai4j.openai4j.OpenAiClient; import dev.ai4j.openai4j.chat.ChatCompletionRequest; diff --git a/src/main/java/com/devoxx/genie/chatmodel/lmstudio/LMStudioChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/local/lmstudio/LMStudioChatModelFactory.java similarity index 78% rename from src/main/java/com/devoxx/genie/chatmodel/lmstudio/LMStudioChatModelFactory.java rename to src/main/java/com/devoxx/genie/chatmodel/local/lmstudio/LMStudioChatModelFactory.java index d711abc9..ac51e2b1 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/lmstudio/LMStudioChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/local/lmstudio/LMStudioChatModelFactory.java @@ -1,15 +1,11 @@ -package com.devoxx.genie.chatmodel.lmstudio; +package com.devoxx.genie.chatmodel.local.lmstudio; import com.devoxx.genie.chatmodel.LocalChatModelFactory; import com.devoxx.genie.model.ChatModel; import com.devoxx.genie.model.LanguageModel; import com.devoxx.genie.model.enumarations.ModelProvider; import com.devoxx.genie.model.lmstudio.LMStudioModelEntryDTO; -import com.devoxx.genie.service.lmstudio.LMStudioService; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; -import com.devoxx.genie.ui.util.NotificationUtil; -import com.devoxx.genie.util.LMStudioUtil; -import com.intellij.openapi.project.ProjectManager; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.chat.StreamingChatLanguageModel; import org.jetbrains.annotations.NotNull; @@ -50,12 +46,7 @@ protected String getModelUrl() { @Override protected LMStudioModelEntryDTO[] fetchModels() throws IOException { - if (!LMStudioUtil.isLMStudioRunning()) { - NotificationUtil.sendNotification(ProjectManager.getInstance().getDefaultProject(), - "LMStudio is not running. 
Please start it and try again."); - throw new IOException("LMStudio is not running"); - } - return LMStudioService.getInstance().getModels(); + return LMStudioModelService.getInstance().getModels(); } @Override diff --git a/src/main/java/com/devoxx/genie/chatmodel/local/lmstudio/LMStudioModelService.java b/src/main/java/com/devoxx/genie/chatmodel/local/lmstudio/LMStudioModelService.java new file mode 100644 index 00000000..6d9f5fea --- /dev/null +++ b/src/main/java/com/devoxx/genie/chatmodel/local/lmstudio/LMStudioModelService.java @@ -0,0 +1,23 @@ +package com.devoxx.genie.chatmodel.local.lmstudio; + +import com.devoxx.genie.chatmodel.local.LocalLLMProvider; +import com.devoxx.genie.chatmodel.local.LocalLLMProviderUtil; +import com.devoxx.genie.model.lmstudio.LMStudioModelEntryDTO; +import com.intellij.openapi.application.ApplicationManager; +import org.jetbrains.annotations.NotNull; + +import java.io.IOException; + +public class LMStudioModelService implements LocalLLMProvider { + + @NotNull + public static LMStudioModelService getInstance() { + return ApplicationManager.getApplication().getService(LMStudioModelService.class); + } + + @Override + public LMStudioModelEntryDTO[] getModels() throws IOException { + return LocalLLMProviderUtil + .getModels("lmStudioModelUrl", "models", LMStudioModelEntryDTO[].class); + } +} diff --git a/src/main/java/com/devoxx/genie/service/ollama/OllamaApiService.java b/src/main/java/com/devoxx/genie/chatmodel/local/ollama/OllamaApiService.java similarity index 97% rename from src/main/java/com/devoxx/genie/service/ollama/OllamaApiService.java rename to src/main/java/com/devoxx/genie/chatmodel/local/ollama/OllamaApiService.java index cb469c2e..57c3851c 100644 --- a/src/main/java/com/devoxx/genie/service/ollama/OllamaApiService.java +++ b/src/main/java/com/devoxx/genie/chatmodel/local/ollama/OllamaApiService.java @@ -1,4 +1,4 @@ -package com.devoxx.genie.service.ollama; +package com.devoxx.genie.chatmodel.local.ollama; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; import com.google.gson.Gson; diff --git a/src/main/java/com/devoxx/genie/chatmodel/ollama/OllamaChatModelFactory.java b/src/main/java/com/devoxx/genie/chatmodel/local/ollama/OllamaChatModelFactory.java similarity index 92% rename from src/main/java/com/devoxx/genie/chatmodel/ollama/OllamaChatModelFactory.java rename to src/main/java/com/devoxx/genie/chatmodel/local/ollama/OllamaChatModelFactory.java index e4424cc4..b9455d3c 100644 --- a/src/main/java/com/devoxx/genie/chatmodel/ollama/OllamaChatModelFactory.java +++ b/src/main/java/com/devoxx/genie/chatmodel/local/ollama/OllamaChatModelFactory.java @@ -1,12 +1,10 @@ -package com.devoxx.genie.chatmodel.ollama; +package com.devoxx.genie.chatmodel.local.ollama; import com.devoxx.genie.chatmodel.LocalChatModelFactory; import com.devoxx.genie.model.ChatModel; import com.devoxx.genie.model.LanguageModel; import com.devoxx.genie.model.enumarations.ModelProvider; import com.devoxx.genie.model.ollama.OllamaModelEntryDTO; -import com.devoxx.genie.service.ollama.OllamaApiService; -import com.devoxx.genie.service.ollama.OllamaService; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.chat.StreamingChatLanguageModel; @@ -53,7 +51,7 @@ protected String getModelUrl() { @Override protected OllamaModelEntryDTO[] fetchModels() throws IOException { - return OllamaService.getInstance().getModels(); + return OllamaModelService.getInstance().getModels(); } 
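For reference, each relocated *ModelService in this patch funnels its HTTP call through the new shared LocalLLMProviderUtil.getModels helper. A minimal usage sketch of that helper (not part of the patch), assuming an initialized IntelliJ application context as in the existing light-platform tests, since the helper resolves the base URL through DevoxxGenieStateService:

import com.devoxx.genie.chatmodel.local.LocalLLMProviderUtil;
import com.devoxx.genie.model.ollama.OllamaModelDTO;
import com.devoxx.genie.model.ollama.OllamaModelEntryDTO;

import java.io.IOException;

class OllamaModelsSketch {

    // Illustrative only: mirrors what OllamaModelService.getModels() does in this patch.
    static OllamaModelEntryDTO[] listOllamaModels() throws IOException {
        // Reads the base URL via getConfigValue("ollamaModelUrl"), issues
        // GET <baseUrl>/api/tags and maps the JSON body onto OllamaModelDTO with Gson.
        return LocalLLMProviderUtil
                .getModels("ollamaModelUrl", "api/tags", OllamaModelDTO.class)
                .getModels();
    }
}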
@Override diff --git a/src/main/java/com/devoxx/genie/service/ollama/OllamaService.java b/src/main/java/com/devoxx/genie/chatmodel/local/ollama/OllamaModelService.java similarity index 74% rename from src/main/java/com/devoxx/genie/service/ollama/OllamaService.java rename to src/main/java/com/devoxx/genie/chatmodel/local/ollama/OllamaModelService.java index 2d319048..8b5436d1 100644 --- a/src/main/java/com/devoxx/genie/service/ollama/OllamaService.java +++ b/src/main/java/com/devoxx/genie/chatmodel/local/ollama/OllamaModelService.java @@ -1,5 +1,7 @@ -package com.devoxx.genie.service.ollama; +package com.devoxx.genie.chatmodel.local.ollama; +import com.devoxx.genie.chatmodel.local.LocalLLMProvider; +import com.devoxx.genie.chatmodel.local.LocalLLMProviderUtil; import com.devoxx.genie.model.ollama.OllamaModelDTO; import com.devoxx.genie.model.ollama.OllamaModelEntryDTO; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; @@ -16,42 +18,30 @@ import static com.devoxx.genie.util.HttpUtil.ensureEndsWithSlash; -public class OllamaService { +public class OllamaModelService implements LocalLLMProvider { private final OkHttpClient client = new OkHttpClient(); private final Gson gson = new Gson(); private final MediaType JSON = MediaType.parse("application/json"); @NotNull - public static OllamaService getInstance() { - return ApplicationManager.getApplication().getService(OllamaService.class); + public static OllamaModelService getInstance() { + return ApplicationManager.getApplication().getService(OllamaModelService.class); } - /** - * Get the models from the Ollama service. - * - * @return array of model names - * @throws IOException if there is an error - */ + @Override public OllamaModelEntryDTO[] getModels() throws IOException { - String baseUrl = ensureEndsWithSlash(DevoxxGenieStateService.getInstance().getOllamaModelUrl()); - - Request request = new Request.Builder() - .url(baseUrl + "api/tags") - .build(); - - try (Response response = client.newCall(request).execute()) { - if (!response.isSuccessful()) { - throw new UnsuccessfulRequestException("Unexpected code " + response); - } - - assert response.body() != null; - - OllamaModelDTO ollamaModelDTO = new Gson().fromJson(response.body().string(), OllamaModelDTO.class); - return ollamaModelDTO != null && ollamaModelDTO.getModels() != null ? ollamaModelDTO.getModels() : new OllamaModelEntryDTO[0]; - } + return LocalLLMProviderUtil + .getModels("ollamaModelUrl", "api/tags", OllamaModelDTO.class) + .getModels(); } + /** + * Pulls the model from the Ollama server for RAG support. 
+ * @param modelName the name of the model to pull + * @param statusCallback a callback to receive status updates + * @throws IOException if an error occurs during the request + */ public void pullModel(String modelName, Consumer statusCallback) throws IOException { String baseUrl = ensureEndsWithSlash(DevoxxGenieStateService.getInstance().getOllamaModelUrl()); diff --git a/src/main/java/com/devoxx/genie/service/LLMModelRegistryService.java b/src/main/java/com/devoxx/genie/service/LLMModelRegistryService.java index 558b25fa..efba2894 100644 --- a/src/main/java/com/devoxx/genie/service/LLMModelRegistryService.java +++ b/src/main/java/com/devoxx/genie/service/LLMModelRegistryService.java @@ -1,6 +1,6 @@ package com.devoxx.genie.service; -import com.devoxx.genie.chatmodel.openrouter.OpenRouterChatModelFactory; +import com.devoxx.genie.chatmodel.cloud.openrouter.OpenRouterChatModelFactory; import com.devoxx.genie.model.LanguageModel; import com.devoxx.genie.model.enumarations.ModelProvider; import com.intellij.openapi.application.ApplicationManager; diff --git a/src/main/java/com/devoxx/genie/service/gpt4all/GPT4AllService.java b/src/main/java/com/devoxx/genie/service/gpt4all/GPT4AllService.java deleted file mode 100644 index bb73f4ba..00000000 --- a/src/main/java/com/devoxx/genie/service/gpt4all/GPT4AllService.java +++ /dev/null @@ -1,55 +0,0 @@ -package com.devoxx.genie.service.gpt4all; - -import com.devoxx.genie.model.gpt4all.Model; -import com.devoxx.genie.model.gpt4all.ResponseDTO; -import com.devoxx.genie.ui.settings.DevoxxGenieStateService; -import com.google.gson.Gson; -import com.intellij.openapi.application.ApplicationManager; -import okhttp3.OkHttpClient; -import okhttp3.Request; -import okhttp3.Response; -import org.jetbrains.annotations.NotNull; - -import java.io.IOException; -import java.util.List; - -import static com.devoxx.genie.util.HttpUtil.ensureEndsWithSlash; - -public class GPT4AllService { - private final OkHttpClient client = new OkHttpClient(); - - @NotNull - public static GPT4AllService getInstance() { - return ApplicationManager.getApplication().getService(GPT4AllService.class); - } - - /** - * Get the models from the GPT4All service. - * @return array of model names - * @throws IOException if there is an error - */ - public List getModels() throws IOException { - String baseUrl = ensureEndsWithSlash(DevoxxGenieStateService.getInstance().getGpt4allModelUrl()); - - Request request = new Request.Builder() - .url(baseUrl + "models") - .build(); - - try (Response response = client.newCall(request).execute()) { - if (!response.isSuccessful()) { - throw new UnsuccessfulRequestException("Unexpected code " + response); - } - - assert response.body() != null; - - ResponseDTO modelResponse = new Gson().fromJson(response.body().string(), ResponseDTO.class); - return modelResponse != null && modelResponse.getData() != null ? 
modelResponse.getData() : List.of(); - } - } - - public static class UnsuccessfulRequestException extends IOException { - public UnsuccessfulRequestException(String message) { - super(message); - } - } -} diff --git a/src/main/java/com/devoxx/genie/service/jan/JanService.java b/src/main/java/com/devoxx/genie/service/jan/JanService.java deleted file mode 100644 index c9d84284..00000000 --- a/src/main/java/com/devoxx/genie/service/jan/JanService.java +++ /dev/null @@ -1,50 +0,0 @@ -package com.devoxx.genie.service.jan; - -import com.devoxx.genie.model.jan.Data; -import com.devoxx.genie.model.jan.ResponseDTO; -import com.devoxx.genie.ui.settings.DevoxxGenieStateService; -import com.google.gson.Gson; -import com.intellij.openapi.application.ApplicationManager; -import okhttp3.OkHttpClient; -import okhttp3.Request; -import okhttp3.Response; -import org.jetbrains.annotations.NotNull; - -import java.io.IOException; -import java.util.List; - -import static com.devoxx.genie.util.HttpUtil.ensureEndsWithSlash; - -public class JanService { - private final OkHttpClient client = new OkHttpClient(); - - @NotNull - public static JanService getInstance() { - return ApplicationManager.getApplication().getService(JanService.class); - } - - public List getModels() throws IOException { - String baseUrl = ensureEndsWithSlash(DevoxxGenieStateService.getInstance().getJanModelUrl()); - - Request request = new Request.Builder() - .url(baseUrl + "models") - .build(); - - try (Response response = client.newCall(request).execute()) { - if (!response.isSuccessful()) { - throw new UnsuccessfulRequestException("Unexpected code " + response); - } - - assert response.body() != null; - - ResponseDTO responseDTO = new Gson().fromJson(response.body().string(), ResponseDTO.class); - return responseDTO != null && responseDTO.getData() != null ? responseDTO.getData() : List.of(); - } - } - - public static class UnsuccessfulRequestException extends IOException { - public UnsuccessfulRequestException(String message) { - super(message); - } - } -} diff --git a/src/main/java/com/devoxx/genie/service/lmstudio/LMStudioService.java b/src/main/java/com/devoxx/genie/service/lmstudio/LMStudioService.java deleted file mode 100644 index 9e5a8024..00000000 --- a/src/main/java/com/devoxx/genie/service/lmstudio/LMStudioService.java +++ /dev/null @@ -1,36 +0,0 @@ -package com.devoxx.genie.service.lmstudio; - -import com.devoxx.genie.model.lmstudio.LMStudioModelDTO; -import com.devoxx.genie.model.lmstudio.LMStudioModelEntryDTO; -import com.devoxx.genie.service.exception.UnsuccessfulRequestException; -import com.devoxx.genie.util.LMStudioUtil; -import com.google.gson.Gson; -import com.intellij.openapi.application.ApplicationManager; -import okhttp3.Response; -import org.jetbrains.annotations.NotNull; - -import java.io.IOException; - -public class LMStudioService { - - @NotNull - public static LMStudioService getInstance() { - return ApplicationManager.getApplication().getService(LMStudioService.class); - } - - public LMStudioModelEntryDTO[] getModels() throws IOException { - try (Response response = LMStudioUtil.executeRequest("models")) { - if (!response.isSuccessful()) { - throw new UnsuccessfulRequestException("Unexpected code " + response); - } - - if (response.body() == null) { - throw new UnsuccessfulRequestException("Response is empty"); - } - - LMStudioModelDTO lmStudioModelDTO = new Gson().fromJson(response.body().string(), LMStudioModelDTO.class); - return lmStudioModelDTO != null && - lmStudioModelDTO.getData() != null ? 
lmStudioModelDTO.getData() : new LMStudioModelEntryDTO[0]; - } - } -} diff --git a/src/main/java/com/devoxx/genie/service/rag/validator/NomicEmbedTextValidator.java b/src/main/java/com/devoxx/genie/service/rag/validator/NomicEmbedTextValidator.java index 0bf6bb27..41608c67 100644 --- a/src/main/java/com/devoxx/genie/service/rag/validator/NomicEmbedTextValidator.java +++ b/src/main/java/com/devoxx/genie/service/rag/validator/NomicEmbedTextValidator.java @@ -1,7 +1,7 @@ package com.devoxx.genie.service.rag.validator; import com.devoxx.genie.model.ollama.OllamaModelEntryDTO; -import com.devoxx.genie.service.ollama.OllamaService; +import com.devoxx.genie.chatmodel.local.ollama.OllamaModelService; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; public class NomicEmbedTextValidator implements Validator { @@ -16,7 +16,7 @@ public boolean isValid() { return false; } try { - OllamaModelEntryDTO[] ollamaModels = OllamaService.getInstance().getModels(); + OllamaModelEntryDTO[] ollamaModels = OllamaModelService.getInstance().getModels(); if (ollamaModels == null) { this.message = "Unable to check if Nomic Embed model is present"; return false; diff --git a/src/main/java/com/devoxx/genie/ui/panel/LlmProviderPanel.java b/src/main/java/com/devoxx/genie/ui/panel/LlmProviderPanel.java index 401ee5f2..8759a168 100644 --- a/src/main/java/com/devoxx/genie/ui/panel/LlmProviderPanel.java +++ b/src/main/java/com/devoxx/genie/ui/panel/LlmProviderPanel.java @@ -148,9 +148,9 @@ private void refreshModels() { } if (selectedProvider == ModelProvider.LMStudio || - selectedProvider == ModelProvider.Ollama || - selectedProvider == ModelProvider.Jan || - selectedProvider == ModelProvider.GPT4All) { + selectedProvider == ModelProvider.Ollama || + selectedProvider == ModelProvider.Jan || + selectedProvider == ModelProvider.GPT4All) { ApplicationManager.getApplication().invokeLater(() -> { refreshButton.setEnabled(false); @@ -223,7 +223,6 @@ private void hideModelNameComboBox() { modelNameComboBox.setVisible(false); } - /** * Restore the last selected provider from persistent storage */ @@ -280,7 +279,9 @@ public void llmSettingsChanged() { * Set the model provider and update the model names. 
*/ private void handleModelProviderSelectionChange(@NotNull ActionEvent e) { - if (!e.getActionCommand().equals(Constant.COMBO_BOX_CHANGED) || !isInitializationComplete || isUpdatingModelNames) + if (!e.getActionCommand().equals(Constant.COMBO_BOX_CHANGED) || + !isInitializationComplete || + isUpdatingModelNames) return; isUpdatingModelNames = true; diff --git a/src/main/java/com/devoxx/genie/ui/settings/DevoxxGenieStateService.java b/src/main/java/com/devoxx/genie/ui/settings/DevoxxGenieStateService.java index 63eca67c..80a12e98 100644 --- a/src/main/java/com/devoxx/genie/ui/settings/DevoxxGenieStateService.java +++ b/src/main/java/com/devoxx/genie/ui/settings/DevoxxGenieStateService.java @@ -15,6 +15,7 @@ import lombok.Setter; import org.jetbrains.annotations.Contract; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; import java.util.*; @@ -265,4 +266,14 @@ public boolean isAzureOpenAIEnabled() { !azureOpenAIEndpoint.isEmpty() && !azureOpenAIDeployment.isEmpty(); } + + public @Nullable String getConfigValue(@NotNull String key) { + return switch (key) { + case "janModelUrl" -> getJanModelUrl(); + case "gpt4allModelUrl" -> getGpt4allModelUrl(); + case "lmStudioModelUrl" -> getLmstudioModelUrl(); + case "ollamaModelUrl" -> getOllamaModelUrl(); + default -> null; + }; + } } diff --git a/src/main/java/com/devoxx/genie/ui/settings/rag/RAGSettingsHandler.java b/src/main/java/com/devoxx/genie/ui/settings/rag/RAGSettingsHandler.java index d73ded64..6c2c1193 100644 --- a/src/main/java/com/devoxx/genie/ui/settings/rag/RAGSettingsHandler.java +++ b/src/main/java/com/devoxx/genie/ui/settings/rag/RAGSettingsHandler.java @@ -2,7 +2,7 @@ import com.devoxx.genie.service.chromadb.ChromaDBManager; import com.devoxx.genie.service.chromadb.ChromaDBStatusCallback; -import com.devoxx.genie.service.ollama.OllamaService; +import com.devoxx.genie.chatmodel.local.ollama.OllamaModelService; import com.devoxx.genie.service.rag.RagValidatorService; import com.devoxx.genie.service.rag.validator.ValidationActionType; import com.devoxx.genie.service.rag.validator.ValidationResult; @@ -122,7 +122,7 @@ public void run(@NotNull ProgressIndicator indicator) { indicator.setIndeterminate(false); try { - OllamaService.getInstance().pullModel("nomic-embed-text", status -> { + OllamaModelService.getInstance().pullModel("nomic-embed-text", status -> { if (status.startsWith("Downloading:")) { try { double progress = Double.parseDouble(status.substring(12, status.length() - 1)); diff --git a/src/main/java/com/devoxx/genie/util/LMStudioUtil.java b/src/main/java/com/devoxx/genie/util/LocalProviderUtil.java similarity index 63% rename from src/main/java/com/devoxx/genie/util/LMStudioUtil.java rename to src/main/java/com/devoxx/genie/util/LocalProviderUtil.java index a20c76c6..dd850f18 100644 --- a/src/main/java/com/devoxx/genie/util/LMStudioUtil.java +++ b/src/main/java/com/devoxx/genie/util/LocalProviderUtil.java @@ -1,6 +1,5 @@ package com.devoxx.genie.util; -import com.devoxx.genie.ui.settings.DevoxxGenieStateService; import com.intellij.openapi.diagnostic.Logger; import okhttp3.OkHttpClient; import okhttp3.Request; @@ -12,8 +11,8 @@ import static com.devoxx.genie.util.HttpUtil.ensureEndsWithSlash; -public class LMStudioUtil { - private static final Logger LOG = Logger.getInstance(LMStudioUtil.class); +public class LocalProviderUtil { + private static final Logger LOG = Logger.getInstance(LocalProviderUtil.class); private static final OkHttpClient client = new OkHttpClient.Builder() 
.connectTimeout(Duration.ofSeconds(5)) @@ -21,8 +20,8 @@ public class LMStudioUtil { .writeTimeout(Duration.ofSeconds(5)) .build(); - public static boolean isLMStudioRunning() { - try (Response response = executeRequest("models")) { + public static boolean isProviderRunning(String baseUrl) { + try (Response response = executeRequest(baseUrl, "models")) { return response.isSuccessful(); } catch (IOException e) { LOG.warn("Failed to connect to LMStudio: " + e.getMessage()); @@ -30,10 +29,10 @@ public static boolean isLMStudioRunning() { } } - public static @NotNull Response executeRequest(String endpoint) throws IOException { - String baseUrl = ensureEndsWithSlash(DevoxxGenieStateService.getInstance().getLmstudioModelUrl()); + public static @NotNull Response executeRequest(String baseUrl, String endpoint) throws IOException { + String url = ensureEndsWithSlash(baseUrl); Request request = new Request.Builder() - .url(baseUrl + endpoint) + .url(url + endpoint) .build(); return client.newCall(request).execute(); } diff --git a/src/main/resources/META-INF/plugin.xml b/src/main/resources/META-INF/plugin.xml index 60f2eda8..aa684fd0 100644 --- a/src/main/resources/META-INF/plugin.xml +++ b/src/main/resources/META-INF/plugin.xml @@ -39,11 +39,12 @@
  • Fix #196 : Continue with prompt if '/' command is unknown
  • Fix #394 : Removed Google Gemini 1.0 Pro
- • Feat #397: add a way to specify custom model name
+ • Feat #397: Support custom model name for OpenAI compliant provider
  • Feat #400 : List "custom local model" when enabled in dropdown
  • Feat #400 : Removed Exo & JLama because they can use the Custom OpenAI-compliant local provider
  • Fix #399 : Fixed UserMessage GPT4ALL issue
  • Feat #402 : Allow all LLM Providers to have the "Add Project" feature
+ • Feat #406 : Show "Jan" model in combobox

V0.4.5

    @@ -492,17 +493,17 @@ - - + + - + - + diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 0b3e0d51..83c9a962 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -1,2 +1,2 @@ -#Tue Dec 17 19:51:55 CET 2024 +#Tue Dec 17 21:10:46 CET 2024 version=0.4.6 diff --git a/src/test/java/com/devoxx/genie/chatmodel/anthropic/AnthropicChatModelFactoryTest.java b/src/test/java/com/devoxx/genie/chatmodel/anthropic/AnthropicChatModelFactoryTest.java index a9318317..b6530b03 100644 --- a/src/test/java/com/devoxx/genie/chatmodel/anthropic/AnthropicChatModelFactoryTest.java +++ b/src/test/java/com/devoxx/genie/chatmodel/anthropic/AnthropicChatModelFactoryTest.java @@ -1,6 +1,7 @@ package com.devoxx.genie.chatmodel.anthropic; import com.devoxx.genie.chatmodel.AbstractLightPlatformTestCase; +import com.devoxx.genie.chatmodel.cloud.anthropic.AnthropicChatModelFactory; import com.devoxx.genie.model.ChatModel; import com.devoxx.genie.model.LanguageModel; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; diff --git a/src/test/java/com/devoxx/genie/chatmodel/azureopenai/AzureOpenAiChatModelFactoryTest.java b/src/test/java/com/devoxx/genie/chatmodel/azureopenai/AzureOpenAiChatModelFactoryTest.java index 2638ea3d..1c7e0ae1 100644 --- a/src/test/java/com/devoxx/genie/chatmodel/azureopenai/AzureOpenAiChatModelFactoryTest.java +++ b/src/test/java/com/devoxx/genie/chatmodel/azureopenai/AzureOpenAiChatModelFactoryTest.java @@ -1,7 +1,7 @@ package com.devoxx.genie.chatmodel.azureopenai; import com.devoxx.genie.chatmodel.AbstractLightPlatformTestCase; -import com.devoxx.genie.chatmodel.openai.OpenAIChatModelFactory; +import com.devoxx.genie.chatmodel.cloud.azureopenai.AzureOpenAIChatModelFactory; import com.devoxx.genie.model.ChatModel; import com.devoxx.genie.model.LanguageModel; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; @@ -14,7 +14,6 @@ import java.util.List; import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.InstanceOfAssertFactories.set; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; diff --git a/src/test/java/com/devoxx/genie/chatmodel/deepinfra/DeepInfraChatModelFactoryTest.java b/src/test/java/com/devoxx/genie/chatmodel/deepinfra/DeepInfraChatModelFactoryTest.java index 275e26f5..b73120ef 100644 --- a/src/test/java/com/devoxx/genie/chatmodel/deepinfra/DeepInfraChatModelFactoryTest.java +++ b/src/test/java/com/devoxx/genie/chatmodel/deepinfra/DeepInfraChatModelFactoryTest.java @@ -1,6 +1,7 @@ package com.devoxx.genie.chatmodel.deepinfra; import com.devoxx.genie.chatmodel.AbstractLightPlatformTestCase; +import com.devoxx.genie.chatmodel.cloud.deepinfra.DeepInfraChatModelFactory; import com.devoxx.genie.model.ChatModel; import com.devoxx.genie.model.LanguageModel; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; diff --git a/src/test/java/com/devoxx/genie/chatmodel/google/GeminiChatModelFactoryTest.java b/src/test/java/com/devoxx/genie/chatmodel/google/GeminiChatModelFactoryTest.java index 9d0fcc17..2d82d101 100644 --- a/src/test/java/com/devoxx/genie/chatmodel/google/GeminiChatModelFactoryTest.java +++ b/src/test/java/com/devoxx/genie/chatmodel/google/GeminiChatModelFactoryTest.java @@ -1,6 +1,7 @@ package com.devoxx.genie.chatmodel.google; import com.devoxx.genie.chatmodel.AbstractLightPlatformTestCase; +import com.devoxx.genie.chatmodel.cloud.google.GoogleChatModelFactory; import 
com.devoxx.genie.model.ChatModel; import com.devoxx.genie.model.LanguageModel; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; diff --git a/src/test/java/com/devoxx/genie/chatmodel/gpt4all/GPT4AllChatModelFactoryTest.java b/src/test/java/com/devoxx/genie/chatmodel/gpt4all/GPT4AllChatModelFactoryTest.java index 850d7b38..7c018c4a 100644 --- a/src/test/java/com/devoxx/genie/chatmodel/gpt4all/GPT4AllChatModelFactoryTest.java +++ b/src/test/java/com/devoxx/genie/chatmodel/gpt4all/GPT4AllChatModelFactoryTest.java @@ -1,5 +1,6 @@ package com.devoxx.genie.chatmodel.gpt4all; +import com.devoxx.genie.chatmodel.local.gpt4all.GPT4AllChatModelFactory; import com.devoxx.genie.model.ChatModel; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; import dev.langchain4j.model.chat.ChatLanguageModel; diff --git a/src/test/java/com/devoxx/genie/chatmodel/groq/GroqChatModelFactoryTest.java b/src/test/java/com/devoxx/genie/chatmodel/groq/GroqChatModelFactoryTest.java index 885afdb8..b796acbc 100644 --- a/src/test/java/com/devoxx/genie/chatmodel/groq/GroqChatModelFactoryTest.java +++ b/src/test/java/com/devoxx/genie/chatmodel/groq/GroqChatModelFactoryTest.java @@ -1,6 +1,7 @@ package com.devoxx.genie.chatmodel.groq; import com.devoxx.genie.chatmodel.AbstractLightPlatformTestCase; +import com.devoxx.genie.chatmodel.cloud.groq.GroqChatModelFactory; import com.devoxx.genie.model.ChatModel; import com.devoxx.genie.model.LanguageModel; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; diff --git a/src/test/java/com/devoxx/genie/chatmodel/jan/JanChatModelFactoryTest.java b/src/test/java/com/devoxx/genie/chatmodel/jan/JanChatModelFactoryTest.java index a5bf5cf8..6df9495b 100644 --- a/src/test/java/com/devoxx/genie/chatmodel/jan/JanChatModelFactoryTest.java +++ b/src/test/java/com/devoxx/genie/chatmodel/jan/JanChatModelFactoryTest.java @@ -1,5 +1,6 @@ package com.devoxx.genie.chatmodel.jan; +import com.devoxx.genie.chatmodel.local.jan.JanChatModelFactory; import com.devoxx.genie.model.ChatModel; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; import dev.langchain4j.model.chat.ChatLanguageModel; diff --git a/src/test/java/com/devoxx/genie/chatmodel/lmstudio/LMStudioChatModelFactoryTest.java b/src/test/java/com/devoxx/genie/chatmodel/lmstudio/LMStudioChatModelFactoryTest.java index 6ad360cf..affb9fc5 100644 --- a/src/test/java/com/devoxx/genie/chatmodel/lmstudio/LMStudioChatModelFactoryTest.java +++ b/src/test/java/com/devoxx/genie/chatmodel/lmstudio/LMStudioChatModelFactoryTest.java @@ -1,5 +1,6 @@ package com.devoxx.genie.chatmodel.lmstudio; +import com.devoxx.genie.chatmodel.local.lmstudio.LMStudioChatModelFactory; import com.devoxx.genie.model.ChatModel; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; import dev.langchain4j.model.chat.ChatLanguageModel; diff --git a/src/test/java/com/devoxx/genie/chatmodel/mistral/MistralChatModelFactoryTest.java b/src/test/java/com/devoxx/genie/chatmodel/mistral/MistralChatModelFactoryTest.java index 1409b904..6b3014b5 100644 --- a/src/test/java/com/devoxx/genie/chatmodel/mistral/MistralChatModelFactoryTest.java +++ b/src/test/java/com/devoxx/genie/chatmodel/mistral/MistralChatModelFactoryTest.java @@ -1,6 +1,7 @@ package com.devoxx.genie.chatmodel.mistral; import com.devoxx.genie.chatmodel.AbstractLightPlatformTestCase; +import com.devoxx.genie.chatmodel.cloud.mistral.MistralChatModelFactory; import com.devoxx.genie.model.ChatModel; import com.devoxx.genie.model.LanguageModel; import 
com.devoxx.genie.ui.settings.DevoxxGenieStateService; diff --git a/src/test/java/com/devoxx/genie/chatmodel/ollama/OllamaChatModelFactoryTest.java b/src/test/java/com/devoxx/genie/chatmodel/ollama/OllamaChatModelFactoryTest.java index 997ff876..95f9e875 100644 --- a/src/test/java/com/devoxx/genie/chatmodel/ollama/OllamaChatModelFactoryTest.java +++ b/src/test/java/com/devoxx/genie/chatmodel/ollama/OllamaChatModelFactoryTest.java @@ -1,5 +1,6 @@ package com.devoxx.genie.chatmodel.ollama; +import com.devoxx.genie.chatmodel.local.ollama.OllamaChatModelFactory; import com.devoxx.genie.model.ChatModel; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; import dev.langchain4j.model.chat.ChatLanguageModel; diff --git a/src/test/java/com/devoxx/genie/chatmodel/openai/OpenAiChatModelFactoryTest.java b/src/test/java/com/devoxx/genie/chatmodel/openai/OpenAiChatModelFactoryTest.java index a2b34d6e..ac4c8193 100644 --- a/src/test/java/com/devoxx/genie/chatmodel/openai/OpenAiChatModelFactoryTest.java +++ b/src/test/java/com/devoxx/genie/chatmodel/openai/OpenAiChatModelFactoryTest.java @@ -1,6 +1,7 @@ package com.devoxx.genie.chatmodel.openai; import com.devoxx.genie.chatmodel.AbstractLightPlatformTestCase; +import com.devoxx.genie.chatmodel.cloud.openai.OpenAIChatModelFactory; import com.devoxx.genie.model.ChatModel; import com.devoxx.genie.model.LanguageModel; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; diff --git a/src/test/java/com/devoxx/genie/chatmodel/openrouter/OpenRouterChatModelFactoryTest.java b/src/test/java/com/devoxx/genie/chatmodel/openrouter/OpenRouterChatModelFactoryTest.java index dee24d2c..8cbdadca 100644 --- a/src/test/java/com/devoxx/genie/chatmodel/openrouter/OpenRouterChatModelFactoryTest.java +++ b/src/test/java/com/devoxx/genie/chatmodel/openrouter/OpenRouterChatModelFactoryTest.java @@ -1,6 +1,7 @@ package com.devoxx.genie.chatmodel.openrouter; import com.devoxx.genie.chatmodel.AbstractLightPlatformTestCase; +import com.devoxx.genie.chatmodel.cloud.openrouter.OpenRouterChatModelFactory; import com.devoxx.genie.model.LanguageModel; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; import com.intellij.openapi.application.ApplicationManager; diff --git a/src/test/java/com/devoxx/genie/service/jan/JanServiceTest.java b/src/test/java/com/devoxx/genie/service/jan/JanServiceTest.java index 887e4504..6489d7f3 100644 --- a/src/test/java/com/devoxx/genie/service/jan/JanServiceTest.java +++ b/src/test/java/com/devoxx/genie/service/jan/JanServiceTest.java @@ -1,6 +1,7 @@ package com.devoxx.genie.service.jan; import com.devoxx.genie.chatmodel.AbstractLightPlatformTestCase; +import com.devoxx.genie.chatmodel.local.jan.JanModelService; import com.devoxx.genie.model.jan.Data; import com.devoxx.genie.ui.settings.DevoxxGenieStateService; import com.intellij.openapi.application.ApplicationManager; @@ -29,7 +30,7 @@ public void setUp() throws Exception { @Test public void testGetModels() throws IOException { - JanService janService = new JanService(); + JanModelService janService = new JanModelService(); List models = janService.getModels(); assertThat(models).isNotEmpty();
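The test above exercises JanModelService directly; the factories themselves now sit behind the new providerChecked/providerRunning flags added to LocalChatModelFactory. A rough sketch of the resulting call flow, using the Ollama factory as one concrete subclass (illustrative only; the wrapper class and method are not part of the patch):

import com.devoxx.genie.chatmodel.local.ollama.OllamaChatModelFactory;
import com.devoxx.genie.model.LanguageModel;

import java.util.List;

class ProviderRefreshSketch {

    void refreshFlow() {
        OllamaChatModelFactory factory = new OllamaChatModelFactory();

        // First call: providerChecked is false, so checkAndFetchModels() probes the provider.
        // If the provider is not running, a notification is sent and an empty list is returned.
        List<LanguageModel> models = factory.getModels();

        // Subsequent calls return cachedModels without contacting the provider again.
        List<LanguageModel> cached = factory.getModels();

        // Reset (e.g. when the user refreshes providers): clears the cache and both flags,
        // so the next getModels() call probes the provider once more.
        factory.resetModels();
        List<LanguageModel> refreshed = factory.getModels();
    }
}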