List 'custom local model' when enabled in dropdown + Removed Exo & JL… #401

Merged · 1 commit · Dec 17, 2024
@@ -8,23 +8,23 @@

@Getter
public enum ModelProvider {
Ollama("Ollama", Type.LOCAL),
LMStudio("LMStudio", Type.LOCAL),
CustomOpenAI("CustomOpenAI", Type.LOCAL),
GPT4All("GPT4All", Type.LOCAL),
Jan("Jan", Type.LOCAL),
LLaMA("LLaMA.c++", Type.LOCAL),
LMStudio("LMStudio", Type.LOCAL),
Ollama("Ollama", Type.LOCAL),

OpenAI("OpenAI", Type.CLOUD),
Anthropic("Anthropic", Type.CLOUD),
Mistral("Mistral", Type.CLOUD),
Groq("Groq", Type.CLOUD),
DeepInfra("DeepInfra", Type.CLOUD),
Google("Google", Type.CLOUD),
Exo("Exo (Experimental)", Type.LOCAL),
LLaMA("LLaMA.c++", Type.LOCAL),
OpenRouter("OpenRouter", Type.CLOUD),
DeepSeek("DeepSeek", Type.CLOUD),
Jlama("Jlama (Experimental /w REST API)", Type.LOCAL),
AzureOpenAI("AzureOpenAI", Type.OPTIONAL),
CustomOpenAI("CustomOpenAI", Type.OPTIONAL);

AzureOpenAI("AzureOpenAI", Type.OPTIONAL);

public enum Type {
LOCAL, // Local Providers
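The hunk above also moves CustomOpenAI from Type.OPTIONAL to Type.LOCAL, which is what allows the dropdown to list the custom local model once it is enabled. As a minimal illustration only (not taken from the plugin source; the helper class, the dropdown wiring, and the getType() accessor generated by Lombok's @Getter are all assumptions), a local-provider list could be derived from the enum like this:

import java.util.Arrays;
import java.util.List;

// Hypothetical helper, for illustration only: collect every provider flagged
// as LOCAL so the model dropdown can list them. After this PR, CustomOpenAI
// qualifies because its Type changed from OPTIONAL to LOCAL.
final class LocalProviderLister {
    private LocalProviderLister() { }

    static List<ModelProvider> localProviders() {
        return Arrays.stream(ModelProvider.values())
                .filter(provider -> provider.getType() == ModelProvider.Type.LOCAL)
                .toList();
    }
}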
@@ -20,8 +20,6 @@ public interface DevoxxGenieSettingsService {

String getJanModelUrl();

String getExoModelUrl();

String getOpenAIKey();

String getAzureOpenAIEndpoint();
@@ -104,8 +102,6 @@ public interface DevoxxGenieSettingsService {

void setJanModelUrl(String url);

void setExoModelUrl(String url);

void setOpenAIKey(String key);

void setAzureOpenAIEndpoint(String endpoint);
@@ -188,7 +184,7 @@ public interface DevoxxGenieSettingsService {

String getCustomOpenAIUrl();

String getJlamaUrl();
void setCustomOpenAIModelName(String text);

void setJlamaUrl(String text);
String getCustomOpenAIModelName();
}
@@ -2,14 +2,13 @@

import com.devoxx.genie.chatmodel.anthropic.AnthropicChatModelFactory;
import com.devoxx.genie.chatmodel.azureopenai.AzureOpenAIChatModelFactory;
import com.devoxx.genie.chatmodel.customopenai.CustomOpenAIChatModelFactory;
import com.devoxx.genie.chatmodel.deepinfra.DeepInfraChatModelFactory;
import com.devoxx.genie.chatmodel.deepseek.DeepSeekChatModelFactory;
import com.devoxx.genie.chatmodel.exo.ExoChatModelFactory;
import com.devoxx.genie.chatmodel.google.GoogleChatModelFactory;
import com.devoxx.genie.chatmodel.gpt4all.GPT4AllChatModelFactory;
import com.devoxx.genie.chatmodel.groq.GroqChatModelFactory;
import com.devoxx.genie.chatmodel.jan.JanChatModelFactory;
import com.devoxx.genie.chatmodel.jlama.JLamaChatModelFactory;
import com.devoxx.genie.chatmodel.lmstudio.LMStudioChatModelFactory;
import com.devoxx.genie.chatmodel.mistral.MistralChatModelFactory;
import com.devoxx.genie.chatmodel.ollama.OllamaChatModelFactory;
@@ -42,21 +41,20 @@ private ChatModelFactoryProvider() {
*/
private static @Nullable ChatModelFactory createFactory(@NotNull String modelProvider) {
return switch (modelProvider) {
case "Ollama" -> new OllamaChatModelFactory();
case "Jan" -> new JanChatModelFactory();
case "OpenRouter" -> new OpenRouterChatModelFactory();
case "LMStudio" -> new LMStudioChatModelFactory();
case "Exo" -> new ExoChatModelFactory();
case "OpenAI" -> new OpenAIChatModelFactory();
case "Anthropic" -> new AnthropicChatModelFactory();
case "Mistral" -> new MistralChatModelFactory();
case "Groq" -> new GroqChatModelFactory();
case "AzureOpenAI" -> new AzureOpenAIChatModelFactory();
case "CustomOpenAI" -> new CustomOpenAIChatModelFactory();
case "DeepInfra" -> new DeepInfraChatModelFactory();
case "Google" -> new GoogleChatModelFactory();
case "DeepSeek" -> new DeepSeekChatModelFactory();
case "Jlama" -> new JLamaChatModelFactory();
case "AzureOpenAI" -> new AzureOpenAIChatModelFactory();
case "Google" -> new GoogleChatModelFactory();
case "Groq" -> new GroqChatModelFactory();
case "GPT4All" -> new GPT4AllChatModelFactory();
case "Jan" -> new JanChatModelFactory();
case "LMStudio" -> new LMStudioChatModelFactory();
case "Mistral" -> new MistralChatModelFactory();
case "Ollama" -> new OllamaChatModelFactory();
case "OpenAI" -> new OpenAIChatModelFactory();
case "OpenRouter" -> new OpenRouterChatModelFactory();
default -> null;
};
}
@@ -81,14 +81,11 @@ private void setLocalBaseUrl(@NotNull LanguageModel languageModel,
case GPT4All:
chatModel.setBaseUrl(stateService.getGpt4allModelUrl());
break;
case Exo:
chatModel.setBaseUrl(stateService.getExoModelUrl());
break;
case LLaMA:
chatModel.setBaseUrl(stateService.getLlamaCPPUrl());
break;
case Jlama:
chatModel.setBaseUrl(stateService.getJlamaUrl());
case CustomOpenAI:
chatModel.setBaseUrl(stateService.getCustomOpenAIUrl());
break;
// Add other local providers as needed
}
@@ -1,4 +1,4 @@
package com.devoxx.genie.chatmodel.jlama;
package com.devoxx.genie.chatmodel.customopenai;

import com.devoxx.genie.chatmodel.ChatModelFactory;
import com.devoxx.genie.model.ChatModel;
@@ -14,13 +14,14 @@
import java.util.Collections;
import java.util.List;

public class JLamaChatModelFactory implements ChatModelFactory {
public class CustomOpenAIChatModelFactory implements ChatModelFactory {

@Override
public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) {
DevoxxGenieStateService stateInstance = DevoxxGenieStateService.getInstance();
return LocalAiChatModel.builder()
.baseUrl(DevoxxGenieStateService.getInstance().getJlamaUrl())
.modelName(TEST_MODEL)
.baseUrl(stateInstance.getCustomOpenAIUrl())
.modelName(stateInstance.getCustomOpenAIModelName().isBlank()?"default":stateInstance.getCustomOpenAIModelName())
.maxRetries(chatModel.getMaxRetries())
.temperature(chatModel.getTemperature())
.maxTokens(chatModel.getMaxTokens())
@@ -31,17 +32,18 @@ public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) {

@Override
public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel chatModel) {
DevoxxGenieStateService stateInstance = DevoxxGenieStateService.getInstance();
return LocalAiStreamingChatModel.builder()
.baseUrl(DevoxxGenieStateService.getInstance().getJlamaUrl())
.modelName(TEST_MODEL)
.baseUrl(stateInstance.getCustomOpenAIUrl())
.modelName(stateInstance.getCustomOpenAIModelName().isBlank()?"default":stateInstance.getCustomOpenAIModelName())
.temperature(chatModel.getTemperature())
.topP(chatModel.getTopP())
.timeout(Duration.ofSeconds(chatModel.getTimeout()))
.build();
}

/**
* Get the model names from the Jlama service.
* Get the model names from the custom local OpenAI compliant service.
* @return List of model names
*/
@Override
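The diff is collapsed right after this @Override, so the body of the method is not visible here. A plausible minimal sketch, assuming the factory simply reports the configured model name (falling back to a placeholder) rather than querying the service; the method name and return type below are inferred from the Javadoc and are not confirmed by this diff:

@Override
public List<String> getModelNames() {
    // Hypothetical body: reuse the configured custom model name, or a
    // placeholder when none is set. The real implementation is not shown here.
    String modelName = DevoxxGenieStateService.getInstance().getCustomOpenAIModelName();
    return (modelName == null || modelName.isBlank())
            ? Collections.singletonList("default")
            : Collections.singletonList(modelName);
}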
153 changes: 0 additions & 153 deletions src/main/java/com/devoxx/genie/chatmodel/exo/ExoChatModelFactory.java

This file was deleted.

@@ -35,8 +35,8 @@ public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) {
builder.baseUrl(DevoxxGenieStateService.getInstance().getCustomOpenAIUrl());
}

if (Strings.isNotBlank(DevoxxGenieStateService.getInstance().getCustomOpenAIModel())) {
builder.modelName(DevoxxGenieStateService.getInstance().getCustomOpenAIModel());
if (Strings.isNotBlank(DevoxxGenieStateService.getInstance().getCustomOpenAIModelName())) {
builder.modelName(DevoxxGenieStateService.getInstance().getCustomOpenAIModelName());
}

return builder.build();
Expand All @@ -56,8 +56,8 @@ public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel ch
builder.baseUrl(DevoxxGenieStateService.getInstance().getCustomOpenAIUrl());
}

if (Strings.isNotBlank(DevoxxGenieStateService.getInstance().getCustomOpenAIModel())) {
builder.modelName(DevoxxGenieStateService.getInstance().getCustomOpenAIModel());
if (Strings.isNotBlank(DevoxxGenieStateService.getInstance().getCustomOpenAIModelName())) {
builder.modelName(DevoxxGenieStateService.getInstance().getCustomOpenAIModelName());
}

return builder.build();
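As a short usage sketch of the renamed setting: setCustomOpenAIModelName and the getters come from the interface changes earlier in this diff, while setCustomOpenAIUrl and the example values are assumptions added here purely for illustration:

// Illustrative only: configure the custom OpenAI-compatible endpoint in the
// plugin settings state before a chat model is built.
DevoxxGenieStateService state = DevoxxGenieStateService.getInstance();
state.setCustomOpenAIUrl("http://localhost:8080/v1/"); // assumed setter mirroring getCustomOpenAIUrl()
state.setCustomOpenAIModelName("my-local-model");      // setter added in this PR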
@@ -129,9 +129,8 @@ private LanguageModel createDefaultLanguageModel(@NotNull DevoxxGenieSettingsSer
ModelProvider selectedProvider = (ModelProvider) modelProviderComboBox.getSelectedItem();
if (selectedProvider != null &&
(selectedProvider.equals(LMStudio) ||
selectedProvider.equals(GPT4All) ||
selectedProvider.equals(Jlama) ||
selectedProvider.equals(LLaMA))) {
selectedProvider.equals(GPT4All) ||
selectedProvider.equals(LLaMA))) {
return LanguageModel.builder()
.provider(selectedProvider)
.apiKeyUsed(false)
2 changes: 0 additions & 2 deletions src/main/java/com/devoxx/genie/model/Constant.java
@@ -36,9 +36,7 @@ private Constant() {
public static final String LMSTUDIO_MODEL_URL = "http://localhost:1234/v1/";
public static final String GPT4ALL_MODEL_URL = "http://localhost:4891/v1/";
public static final String JAN_MODEL_URL = "http://localhost:1337/v1/";
public static final String EXO_MODEL_URL = "http://localhost:8000/v1/";
public static final String LLAMA_CPP_MODEL_URL = "http://localhost:8080";
public static final String JLAMA_MODEL_URL = "http://localhost:8080/";

// ActionCommands
public static final String SUBMIT_ACTION = "submit";