Commit 342247b: Include numeric value Spinners

stephanj committed Jun 3, 2024
1 parent 3c43826 · commit 342247b

Showing 37 changed files with 232 additions and 995 deletions.
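Summary (from the diffs below): the monolithic SettingsStateService is split into LLMStateService (API keys and local model URLs), LLMConfigStateService (numeric settings such as temperature, max output tokens, and chat memory size), and PromptSettingsStateService (the system, test, review, explain, and custom prompts). Per the commit title, numeric settings are now stored as numeric values backed by spinner components, so string parsing such as Integer.parseInt is dropped.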
src/main/java/com/devoxx/genie/chatmodel/ChatModelProvider.java
14 changes: 6 additions & 8 deletions

@@ -13,7 +13,7 @@
 import com.devoxx.genie.model.Constant;
 import com.devoxx.genie.model.enumarations.ModelProvider;
 import com.devoxx.genie.model.request.ChatMessageContext;
-import com.devoxx.genie.service.settings.SettingsStateService;
+import com.devoxx.genie.ui.settings.llmconfig.LLMConfigStateService;
 import dev.langchain4j.model.chat.ChatLanguageModel;
 import dev.langchain4j.model.chat.StreamingChatLanguageModel;
 import lombok.Setter;
@@ -83,7 +83,7 @@ public StreamingChatLanguageModel getStreamingChatLanguageModel(@NotNull ChatMes
      */
     public @NotNull ChatModel initChatModel(@NotNull ChatMessageContext chatMessageContext) {
         ChatModel chatModel = new ChatModel();
-        SettingsStateService settingsState = SettingsStateService.getInstance();
+        LLMConfigStateService settingsState = LLMConfigStateService.getInstance();
         setMaxOutputTokens(settingsState, chatModel);

         chatModel.setTemperature(settingsState.getTemperature());
@@ -101,15 +101,13 @@ public StreamingChatLanguageModel getStreamingChatLanguageModel(@NotNull ChatMes
      * @param settingsState the settings state
      * @param chatModel     the chat model
      */
-    private static void setMaxOutputTokens(@NotNull SettingsStateService settingsState, ChatModel chatModel) {
-        String maxOutputTokens = settingsState.getMaxOutputTokens();
-        if (maxOutputTokens.isBlank()) {
+    private static void setMaxOutputTokens(@NotNull LLMConfigStateService settingsState, ChatModel chatModel) {
+        Integer maxOutputTokens = settingsState.getMaxOutputTokens();
+        if (maxOutputTokens == null) {
             chatModel.setMaxTokens(Constant.MAX_OUTPUT_TOKENS);
         } else {
-            int value;
             try {
-                value = Integer.parseInt(maxOutputTokens);
-                chatModel.setMaxTokens(value);
+                chatModel.setMaxTokens(maxOutputTokens);
             } catch (NumberFormatException e) {
                 chatModel.setMaxTokens(Constant.MAX_OUTPUT_TOKENS);
             }
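A note on the last hunk above: getMaxOutputTokens() now returns an Integer, so nothing is parsed any more and the surviving try/catch can never see a NumberFormatException. A minimal equivalent without it might look as follows (a sketch, assuming the same project types as in the diff; not part of this commit):

private static void setMaxOutputTokens(@NotNull LLMConfigStateService settingsState, ChatModel chatModel) {
    // The setting is already numeric (or null when unset), so no parsing and
    // no NumberFormatException handling is needed; fall back to the default.
    Integer maxOutputTokens = settingsState.getMaxOutputTokens();
    chatModel.setMaxTokens(maxOutputTokens != null ? maxOutputTokens : Constant.MAX_OUTPUT_TOKENS);
}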
src/main/java/com/devoxx/genie/chatmodel/LLMProviderConstant.java

@@ -1,6 +1,6 @@
 package com.devoxx.genie.chatmodel;

-import com.devoxx.genie.service.settings.SettingsStateService;
+import com.devoxx.genie.ui.settings.llm.LLMStateService;
 import org.jetbrains.annotations.NotNull;

 import java.util.*;
@@ -32,7 +32,7 @@ private LLMProviderConstant() {
     };

     public static @NotNull List<String> getLLMProviders() {
-        SettingsStateService settingState = SettingsStateService.getInstance();
+        LLMStateService settingState = LLMStateService.getInstance();
         Map<String, Supplier<String>> providerKeyMap = new HashMap<>();
         providerKeyMap.put(OpenAI.getName(), settingState::getOpenAIKey);
         providerKeyMap.put(Anthropic.getName(), settingState::getAnthropicKey);
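The hunk above ends while providerKeyMap is still being built; the elided remainder presumably keeps only the providers whose key supplier returns a configured value. A hypothetical sketch of that filtering pattern (an assumption; the actual logic is not shown in this diff):

// Hypothetical continuation of getLLMProviders(): list a provider only when
// its API key supplier yields a non-blank value.
return providerKeyMap.entrySet().stream()
        .filter(entry -> {
            String key = entry.getValue().get();   // e.g. settingState.getOpenAIKey()
            return key != null && !key.isBlank();
        })
        .map(Map.Entry::getKey)
        .sorted()
        .toList();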
AnthropicChatModelFactory.java

@@ -2,7 +2,7 @@

 import com.devoxx.genie.chatmodel.ChatModelFactory;
 import com.devoxx.genie.model.ChatModel;
-import com.devoxx.genie.service.settings.SettingsStateService;
+import com.devoxx.genie.ui.settings.llm.LLMStateService;
 import dev.langchain4j.model.anthropic.AnthropicChatModel;
 import dev.langchain4j.model.anthropic.AnthropicStreamingChatModel;
 import dev.langchain4j.model.chat.ChatLanguageModel;
@@ -40,7 +40,7 @@ public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel ch

     @Override
     public String getApiKey() {
-        return SettingsStateService.getInstance().getAnthropicKey().trim();
+        return LLMStateService.getInstance().getAnthropicKey().trim();
     }

     @Override
DeepInfraChatModelFactory.java

@@ -2,7 +2,7 @@

 import com.devoxx.genie.chatmodel.ChatModelFactory;
 import com.devoxx.genie.model.ChatModel;
-import com.devoxx.genie.service.settings.SettingsStateService;
+import com.devoxx.genie.ui.settings.llm.LLMStateService;
 import dev.langchain4j.model.chat.ChatLanguageModel;
 import dev.langchain4j.model.chat.StreamingChatLanguageModel;
 import dev.langchain4j.model.openai.OpenAiChatModel;
@@ -42,7 +42,7 @@ public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel ch

     @Override
     public String getApiKey() {
-        return SettingsStateService.getInstance().getDeepInfraKey().trim();
+        return LLMStateService.getInstance().getDeepInfraKey().trim();
     }

     @Override
GeminiChatModelFactory.java

@@ -3,7 +3,7 @@
 import com.devoxx.genie.chatmodel.ChatModelFactory;
 import com.devoxx.genie.model.ChatModel;
 import com.devoxx.genie.model.gemini.GeminiChatModel;
-import com.devoxx.genie.service.settings.SettingsStateService;
+import com.devoxx.genie.ui.settings.llm.LLMStateService;
 import dev.langchain4j.model.chat.ChatLanguageModel;
 import org.jetbrains.annotations.NotNull;

@@ -26,7 +26,7 @@ public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) {

     @Override
     public String getApiKey() {
-        return SettingsStateService.getInstance().getGeminiKey().trim();
+        return LLMStateService.getInstance().getGeminiKey().trim();
     }

     @Override
GPT4AllChatModelFactory.java

@@ -2,7 +2,7 @@

 import com.devoxx.genie.chatmodel.ChatModelFactory;
 import com.devoxx.genie.model.ChatModel;
-import com.devoxx.genie.service.settings.SettingsStateService;
+import com.devoxx.genie.ui.settings.llm.LLMStateService;
 import dev.langchain4j.model.chat.ChatLanguageModel;
 import dev.langchain4j.model.chat.StreamingChatLanguageModel;
 import dev.langchain4j.model.localai.LocalAiChatModel;
@@ -16,7 +16,7 @@ public class GPT4AllChatModelFactory implements ChatModelFactory {
     @Override
     public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) {
         return LocalAiChatModel.builder()
-            .baseUrl(SettingsStateService.getInstance().getGpt4allModelUrl())
+            .baseUrl(LLMStateService.getInstance().getGpt4allModelUrl())
             .modelName("test-model")
             .maxRetries(chatModel.getMaxRetries())
             .maxTokens(chatModel.getMaxTokens())
@@ -28,7 +28,7 @@ public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) {

     public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel chatModel) {
         return LocalAiStreamingChatModel.builder()
-            .baseUrl(SettingsStateService.getInstance().getGpt4allModelUrl())
+            .baseUrl(LLMStateService.getInstance().getGpt4allModelUrl())
             .modelName("test-model")
             .temperature(chatModel.getTemperature())
             .topP(chatModel.getTopP())
GroqChatModelFactory.java

@@ -2,7 +2,7 @@

 import com.devoxx.genie.chatmodel.ChatModelFactory;
 import com.devoxx.genie.model.ChatModel;
-import com.devoxx.genie.service.settings.SettingsStateService;
+import com.devoxx.genie.ui.settings.llm.LLMStateService;
 import dev.langchain4j.model.chat.ChatLanguageModel;
 import dev.langchain4j.model.openai.OpenAiChatModel;
 import org.jetbrains.annotations.NotNull;
@@ -40,7 +40,7 @@ public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) {

     @Override
     public String getApiKey() {
-        return SettingsStateService.getInstance().getGroqKey().trim();
+        return LLMStateService.getInstance().getGroqKey().trim();
     }

     @Override
JanChatModelFactory.java

@@ -4,7 +4,7 @@
 import com.devoxx.genie.model.ChatModel;
 import com.devoxx.genie.model.jan.Data;
 import com.devoxx.genie.service.JanService;
-import com.devoxx.genie.service.settings.SettingsStateService;
+import com.devoxx.genie.ui.settings.llm.LLMStateService;
 import com.devoxx.genie.ui.util.NotificationUtil;
 import com.intellij.openapi.project.ProjectManager;
 import dev.langchain4j.model.chat.ChatLanguageModel;
@@ -27,7 +27,7 @@ public class JanChatModelFactory implements ChatModelFactory {
     @Override
     public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) {
         return LocalAiChatModel.builder()
-            .baseUrl(SettingsStateService.getInstance().getJanModelUrl())
+            .baseUrl(LLMStateService.getInstance().getJanModelUrl())
             .modelName(chatModel.getModelName())
             .maxRetries(chatModel.getMaxRetries())
             .temperature(chatModel.getTemperature())
@@ -41,7 +41,7 @@ public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) {
     @Override
     public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel chatModel) {
         return LocalAiStreamingChatModel.builder()
-            .baseUrl(SettingsStateService.getInstance().getJanModelUrl())
+            .baseUrl(LLMStateService.getInstance().getJanModelUrl())
             .modelName(chatModel.getModelName())
             .temperature(chatModel.getTemperature())
             .topP(chatModel.getTopP())
LMStudioChatModelFactory.java

@@ -2,7 +2,7 @@

 import com.devoxx.genie.chatmodel.ChatModelFactory;
 import com.devoxx.genie.model.ChatModel;
-import com.devoxx.genie.service.settings.SettingsStateService;
+import com.devoxx.genie.ui.settings.llm.LLMStateService;
 import dev.langchain4j.model.chat.ChatLanguageModel;
 import dev.langchain4j.model.chat.StreamingChatLanguageModel;
 import dev.langchain4j.model.localai.LocalAiChatModel;
@@ -16,7 +16,7 @@ public class LMStudioChatModelFactory implements ChatModelFactory {
     @Override
     public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) {
         return LocalAiChatModel.builder()
-            .baseUrl(SettingsStateService.getInstance().getLmstudioModelUrl())
+            .baseUrl(LLMStateService.getInstance().getLmstudioModelUrl())
             .modelName("LMStudio")
             .temperature(chatModel.getTemperature())
             .topP(chatModel.getTopP())
@@ -29,7 +29,7 @@ public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) {
     @Override
     public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel chatModel) {
         return LocalAiStreamingChatModel.builder()
-            .baseUrl(SettingsStateService.getInstance().getLmstudioModelUrl())
+            .baseUrl(LLMStateService.getInstance().getLmstudioModelUrl())
             .modelName("LMStudio")
             .temperature(chatModel.getTemperature())
             .topP(chatModel.getTopP())
MistralChatModelFactory.java

@@ -2,7 +2,7 @@

 import com.devoxx.genie.chatmodel.ChatModelFactory;
 import com.devoxx.genie.model.ChatModel;
-import com.devoxx.genie.service.settings.SettingsStateService;
+import com.devoxx.genie.ui.settings.llm.LLMStateService;
 import dev.langchain4j.model.chat.ChatLanguageModel;
 import dev.langchain4j.model.chat.StreamingChatLanguageModel;
 import dev.langchain4j.model.mistralai.MistralAiChatModel;
@@ -42,7 +42,7 @@ public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel ch

     @Override
     public String getApiKey() {
-        return SettingsStateService.getInstance().getMistralKey().trim();
+        return LLMStateService.getInstance().getMistralKey().trim();
     }

     @Override
OllamaChatModelFactory.java

@@ -4,7 +4,7 @@
 import com.devoxx.genie.model.ChatModel;
 import com.devoxx.genie.model.ollama.OllamaModelEntryDTO;
 import com.devoxx.genie.service.OllamaService;
-import com.devoxx.genie.service.settings.SettingsStateService;
+import com.devoxx.genie.ui.settings.llm.LLMStateService;
 import com.devoxx.genie.ui.util.NotificationUtil;
 import com.intellij.openapi.project.ProjectManager;
 import dev.langchain4j.model.chat.ChatLanguageModel;
@@ -23,7 +23,7 @@ public class OllamaChatModelFactory implements ChatModelFactory {
     @Override
     public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) {
         return OllamaChatModel.builder()
-            .baseUrl(SettingsStateService.getInstance().getOllamaModelUrl())
+            .baseUrl(LLMStateService.getInstance().getOllamaModelUrl())
             .modelName(chatModel.getModelName())
             .temperature(chatModel.getTemperature())
             .topP(chatModel.getTopP())
@@ -35,7 +35,7 @@ public ChatLanguageModel createChatModel(@NotNull ChatModel chatModel) {
     @Override
     public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel chatModel) {
         return OllamaStreamingChatModel.builder()
-            .baseUrl(SettingsStateService.getInstance().getOllamaModelUrl())
+            .baseUrl(LLMStateService.getInstance().getOllamaModelUrl())
             .modelName(chatModel.getModelName())
             .temperature(chatModel.getTemperature())
             .topP(chatModel.getTopP())
OpenAIChatModelFactory.java

@@ -2,7 +2,7 @@

 import com.devoxx.genie.chatmodel.ChatModelFactory;
 import com.devoxx.genie.model.ChatModel;
-import com.devoxx.genie.service.settings.SettingsStateService;
+import com.devoxx.genie.ui.settings.llm.LLMStateService;
 import dev.langchain4j.model.chat.ChatLanguageModel;
 import dev.langchain4j.model.chat.StreamingChatLanguageModel;
 import dev.langchain4j.model.openai.OpenAiChatModel;
@@ -41,7 +41,7 @@ public StreamingChatLanguageModel createStreamingChatModel(@NotNull ChatModel ch

     @Override
     public String getApiKey() {
-        return SettingsStateService.getInstance().getOpenAIKey().trim();
+        return LLMStateService.getInstance().getOpenAIKey().trim();
     }

     @Override
src/main/java/com/devoxx/genie/model/ChatModel.java
4 changes: 2 additions & 2 deletions

@@ -9,8 +9,8 @@ public class ChatModel {

     private String baseUrl;
     private String modelName;
-    private Double temperature = Constant.TEMPERATURE;
-    private Double topP = Constant.TOP_P;
+    private double temperature = Constant.TEMPERATURE;
+    private double topP = Constant.TOP_P;
     private int maxTokens = Constant.MAX_OUTPUT_TOKENS;
     private int maxRetries = Constant.MAX_RETRIES;
     private int timeout = Constant.TIMEOUT;
src/main/java/com/devoxx/genie/service/ChatMemoryService.java
4 changes: 2 additions & 2 deletions

@@ -1,7 +1,7 @@
 package com.devoxx.genie.service;

-import com.devoxx.genie.service.settings.SettingsStateService;
 import com.devoxx.genie.ui.listener.ChatMemorySizeListener;
+import com.devoxx.genie.ui.settings.llmconfig.LLMConfigStateService;
 import com.devoxx.genie.ui.topic.AppTopics;
 import com.intellij.openapi.application.ApplicationManager;
 import dev.langchain4j.data.message.ChatMessage;
@@ -22,7 +22,7 @@ public class ChatMemoryService implements ChatMemorySizeListener {
      * @link PostStartupActivity
      */
     public void init() {
-        createChatMemory(SettingsStateService.getInstance().getChatMemorySize());
+        createChatMemory(LLMConfigStateService.getInstance().getChatMemorySize());
         createChangeListener();
     }

src/main/java/com/devoxx/genie/service/ChatPromptExecutor.java
16 changes: 8 additions & 8 deletions

@@ -1,8 +1,9 @@
 package com.devoxx.genie.service;

 import com.devoxx.genie.model.request.ChatMessageContext;
-import com.devoxx.genie.service.settings.SettingsStateService;
 import com.devoxx.genie.ui.panel.PromptOutputPanel;
+import com.devoxx.genie.ui.settings.llm.LLMStateService;
+import com.devoxx.genie.ui.settings.prompt.PromptSettingsStateService;
 import com.devoxx.genie.ui.util.NotificationUtil;
 import com.intellij.openapi.progress.ProgressIndicator;
 import com.intellij.openapi.progress.Task;
@@ -18,7 +19,6 @@
 public class ChatPromptExecutor {

     private final PromptExecutionService promptExecutionService = PromptExecutionService.getInstance();
-    private final SettingsStateService settingsState = SettingsStateService.getInstance();

     public ChatPromptExecutor() {
     }
@@ -39,7 +39,7 @@ public void run(@NotNull ProgressIndicator progressIndicator) {
         if (chatMessageContext.getContext() != null && chatMessageContext.getContext().toLowerCase().contains("search")) {
             webSearchPrompt(chatMessageContext, promptOutputPanel, enableButtons);
         } else {
-            if (SettingsStateService.getInstance().getStreamMode()) {
+            if (LLMStateService.getInstance().getStreamMode()) {
                 setupStreaming(chatMessageContext, promptOutputPanel, enableButtons);
             } else {
                 runPrompt(chatMessageContext, promptOutputPanel, enableButtons);
@@ -99,7 +99,7 @@ private void setupStreaming(@NotNull ChatMessageContext chatMessageContext,
         MessageCreationService messageCreationService = MessageCreationService.getInstance();

         if (chatMemoryService.isEmpty()) {
-            chatMemoryService.add(new SystemMessage(SettingsStateService.getInstance().getSystemPrompt()));
+            chatMemoryService.add(new SystemMessage(PromptSettingsStateService.getInstance().getSystemPrompt()));
         }

         UserMessage userMessage = messageCreationService.createUserMessage(chatMessageContext);
@@ -124,13 +124,13 @@ private Optional<String> getCommandFromPrompt(@NotNull String prompt,
         if (prompt.startsWith("/")) {

             if (prompt.equalsIgnoreCase("/test")) {
-                prompt = settingsState.getTestPrompt();
+                prompt = PromptSettingsStateService.getInstance().getTestPrompt();
             } else if (prompt.equalsIgnoreCase("/review")) {
-                prompt = settingsState.getReviewPrompt();
+                prompt = PromptSettingsStateService.getInstance().getReviewPrompt();
             } else if (prompt.equalsIgnoreCase("/explain")) {
-                prompt = settingsState.getExplainPrompt();
+                prompt = PromptSettingsStateService.getInstance().getExplainPrompt();
             } else if (prompt.equalsIgnoreCase("/custom")) {
-                prompt = settingsState.getCustomPrompt();
+                prompt = PromptSettingsStateService.getInstance().getCustomPrompt();
             } else {
                 promptOutputPanel.showHelpText();
                 return Optional.empty();
src/main/java/com/devoxx/genie/service/JanService.java
4 changes: 2 additions & 2 deletions

@@ -2,7 +2,7 @@

 import com.devoxx.genie.model.jan.Data;
 import com.devoxx.genie.model.jan.ResponseDTO;
-import com.devoxx.genie.service.settings.SettingsStateService;
+import com.devoxx.genie.ui.settings.llm.LLMStateService;
 import com.google.gson.Gson;
 import okhttp3.OkHttpClient;
 import okhttp3.Request;
@@ -21,7 +21,7 @@ public JanService(OkHttpClient client) {
     }

     public List<Data> getModels() throws IOException {
-        String baseUrl = ensureEndsWithSlash(SettingsStateService.getInstance().getJanModelUrl());
+        String baseUrl = ensureEndsWithSlash(LLMStateService.getInstance().getJanModelUrl());

         Request request = new Request.Builder()
             .url(baseUrl + "models")
src/main/java/com/devoxx/genie/service/MessageCreationService.java

@@ -1,7 +1,8 @@
 package com.devoxx.genie.service;

 import com.devoxx.genie.model.request.ChatMessageContext;
-import com.devoxx.genie.service.settings.SettingsStateService;
+import com.devoxx.genie.ui.settings.llm.LLMStateService;
+import com.devoxx.genie.ui.settings.llmconfig.LLMConfigStateService;
 import com.devoxx.genie.ui.util.NotificationUtil;
 import com.intellij.openapi.application.ApplicationManager;
 import com.intellij.openapi.editor.Document;
@@ -100,7 +101,7 @@ public static MessageCreationService getInstance() {

         appendIfNotEmpty(sb, chatMessageContext.getEditorInfo().getSelectedText());

-        if (SettingsStateService.getInstance().getAstMode()) {
+        if (LLMConfigStateService.getInstance().getAstMode()) {
             addASTContext(chatMessageContext, sb);
         }

(Diff truncated: the remaining changed files of the 37 in this commit are not shown.)