From 7a26efb0595a6c952bbac7d2a45ae55b807514d2 Mon Sep 17 00:00:00 2001
From: Thomas Vitale
Date: Mon, 16 Sep 2024 23:11:36 +0200
Subject: [PATCH] ChatOptions migrated from Float to Double

---
 .../ai/spring/StructuredDataExtractionService.java |  2 +-
 .../com/thomasvitale/ai/spring/TextClassifier.java |  2 +-
 .../com/thomasvitale/ai/spring/ChatController.java |  2 +-
 .../ai/spring/model/ChatModelController.java       |  2 +-
 .../com/thomasvitale/ai/spring/ChatController.java |  4 ++--
 .../ai/spring/model/ChatModelController.java       |  4 ++--
 .../com/thomasvitale/ai/spring/ChatController.java |  4 ++--
 .../ai/spring/model/ChatModelController.java       |  4 ++--
 .../com/thomasvitale/ai/spring/ChatController.java |  4 ++--
 .../ai/spring/model/ChatModelController.java       |  4 ++--
 .../com/thomasvitale/ai/spring/ChatController.java |  8 ++++----
 .../com/thomasvitale/ai/spring/ChatController.java | 12 ++++++------
 .../com/thomasvitale/ai/spring/ChatController.java | 12 ++++++------
 13 files changed, 32 insertions(+), 32 deletions(-)

diff --git a/00-use-cases/structured-data-extraction/src/main/java/com/thomasvitale/ai/spring/StructuredDataExtractionService.java b/00-use-cases/structured-data-extraction/src/main/java/com/thomasvitale/ai/spring/StructuredDataExtractionService.java
index a1109f2..4b9b4b9 100644
--- a/00-use-cases/structured-data-extraction/src/main/java/com/thomasvitale/ai/spring/StructuredDataExtractionService.java
+++ b/00-use-cases/structured-data-extraction/src/main/java/com/thomasvitale/ai/spring/StructuredDataExtractionService.java
@@ -12,7 +12,7 @@ class StructuredDataExtractionService {
     StructuredDataExtractionService(ChatClient.Builder chatClientBuilder) {
         this.chatClient = chatClientBuilder
                 .defaultOptions(ChatOptionsBuilder.builder()
-                        .withTemperature(0.0f)
+                        .withTemperature(0.0)
                         .build())
                 .build();
     }
diff --git a/00-use-cases/text-classification/src/main/java/com/thomasvitale/ai/spring/TextClassifier.java b/00-use-cases/text-classification/src/main/java/com/thomasvitale/ai/spring/TextClassifier.java
index 4d8e576..e7597df 100644
--- a/00-use-cases/text-classification/src/main/java/com/thomasvitale/ai/spring/TextClassifier.java
+++ b/00-use-cases/text-classification/src/main/java/com/thomasvitale/ai/spring/TextClassifier.java
@@ -18,7 +18,7 @@ class TextClassifier {
     TextClassifier(ChatClient.Builder chatClientBuilder) {
         this.chatClient = chatClientBuilder
                 .defaultOptions(ChatOptionsBuilder.builder()
-                        .withTemperature(0.0f)
+                        .withTemperature(0.0)
                         .build())
                 .build();
     }
diff --git a/01-chat-models/chat-models-mistral-ai/src/main/java/com/thomasvitale/ai/spring/ChatController.java b/01-chat-models/chat-models-mistral-ai/src/main/java/com/thomasvitale/ai/spring/ChatController.java
index 7bf7c5f..2438dd2 100644
--- a/01-chat-models/chat-models-mistral-ai/src/main/java/com/thomasvitale/ai/spring/ChatController.java
+++ b/01-chat-models/chat-models-mistral-ai/src/main/java/com/thomasvitale/ai/spring/ChatController.java
@@ -33,7 +33,7 @@ String chatWithGenericOptions(@RequestParam(defaultValue = "What did Gandalf say
         return chatClient.prompt()
                 .user(question)
                 .options(ChatOptionsBuilder.builder()
-                        .withTemperature(0.9f)
+                        .withTemperature(0.9)
                         .build())
                 .call()
                 .content();
diff --git a/01-chat-models/chat-models-mistral-ai/src/main/java/com/thomasvitale/ai/spring/model/ChatModelController.java b/01-chat-models/chat-models-mistral-ai/src/main/java/com/thomasvitale/ai/spring/model/ChatModelController.java
index 3155eae..aa9bc3a 100644
--- a/01-chat-models/chat-models-mistral-ai/src/main/java/com/thomasvitale/ai/spring/model/ChatModelController.java
+++ b/01-chat-models/chat-models-mistral-ai/src/main/java/com/thomasvitale/ai/spring/model/ChatModelController.java
@@ -31,7 +31,7 @@ String chat(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?")
     @GetMapping("/chat/generic-options")
     String chatWithGenericOptions(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?") String question) {
         return chatModel.call(new Prompt(question, ChatOptionsBuilder.builder()
-                .withTemperature(0.9f)
+                .withTemperature(0.9)
                 .build()))
             .getResult().getOutput().getContent();
     }
diff --git a/01-chat-models/chat-models-multiple-providers/src/main/java/com/thomasvitale/ai/spring/ChatController.java b/01-chat-models/chat-models-multiple-providers/src/main/java/com/thomasvitale/ai/spring/ChatController.java
index 0bb48a1..dc2e946 100644
--- a/01-chat-models/chat-models-multiple-providers/src/main/java/com/thomasvitale/ai/spring/ChatController.java
+++ b/01-chat-models/chat-models-multiple-providers/src/main/java/com/thomasvitale/ai/spring/ChatController.java
@@ -45,7 +45,7 @@ String chatWithMistralAiOptions(@RequestParam(defaultValue = "What did Gandalf s
                 .user(question)
                 .options(MistralAiChatOptions.builder()
                         .withModel("open-mixtral-8x7b")
-                        .withTemperature(1.0f)
+                        .withTemperature(1.0)
                         .build())
                 .call()
                 .content();
@@ -57,7 +57,7 @@ String chatWithOpenAiOptions(@RequestParam(defaultValue = "What did Gandalf say
                 .user(question)
                 .options(OpenAiChatOptions.builder()
                         .withModel("gpt-4o-mini")
-                        .withTemperature(1.0f)
+                        .withTemperature(1.0)
                         .build())
                 .call()
                 .content();
diff --git a/01-chat-models/chat-models-multiple-providers/src/main/java/com/thomasvitale/ai/spring/model/ChatModelController.java b/01-chat-models/chat-models-multiple-providers/src/main/java/com/thomasvitale/ai/spring/model/ChatModelController.java
index 7450546..901e9ea 100644
--- a/01-chat-models/chat-models-multiple-providers/src/main/java/com/thomasvitale/ai/spring/model/ChatModelController.java
+++ b/01-chat-models/chat-models-multiple-providers/src/main/java/com/thomasvitale/ai/spring/model/ChatModelController.java
@@ -39,7 +39,7 @@ String chatOpenAi(@RequestParam(defaultValue = "What did Gandalf say to the Balr
     String chatWithMistralAiOptions(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?") String question) {
         return mistralAiChatModel.call(new Prompt(question, MistralAiChatOptions.builder()
                 .withModel("open-mixtral-8x7b")
-                .withTemperature(1.0f)
+                .withTemperature(1.0)
                 .build()))
             .getResult().getOutput().getContent();
     }
@@ -48,7 +48,7 @@ String chatWithMistralAiOptions(@RequestParam(defaultValue = "What did Gandalf s
     String chatWithOpenAiOptions(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?") String question) {
         return openAiChatModel.call(new Prompt(question, OpenAiChatOptions.builder()
                 .withModel("gpt-4o-mini")
-                .withTemperature(1.0f)
+                .withTemperature(1.0)
                 .build()))
             .getResult().getOutput().getContent();
     }
diff --git a/01-chat-models/chat-models-ollama/src/main/java/com/thomasvitale/ai/spring/ChatController.java b/01-chat-models/chat-models-ollama/src/main/java/com/thomasvitale/ai/spring/ChatController.java
index 088c543..e49a3c2 100644
--- a/01-chat-models/chat-models-ollama/src/main/java/com/thomasvitale/ai/spring/ChatController.java
+++ b/01-chat-models/chat-models-ollama/src/main/java/com/thomasvitale/ai/spring/ChatController.java
@@ -33,7 +33,7 @@ String chatWithGenericOptions(@RequestParam(defaultValue = "What did Gandalf say
         return chatClient.prompt()
                 .user(question)
                 .options(ChatOptionsBuilder.builder()
-                        .withTemperature(0.9f)
+                        .withTemperature(0.9)
                         .build())
                 .call()
                 .content();
@@ -45,7 +45,7 @@ String chatWithProviderOptions(@RequestParam(defaultValue = "What did Gandalf sa
                 .user(question)
                 .options(OllamaOptions.create()
                         .withModel("mistral")
-                        .withRepeatPenalty(1.5f))
+                        .withRepeatPenalty(1.5))
                 .call()
                 .content();
     }
diff --git a/01-chat-models/chat-models-ollama/src/main/java/com/thomasvitale/ai/spring/model/ChatModelController.java b/01-chat-models/chat-models-ollama/src/main/java/com/thomasvitale/ai/spring/model/ChatModelController.java
index a0cdbd5..a3b0710 100644
--- a/01-chat-models/chat-models-ollama/src/main/java/com/thomasvitale/ai/spring/model/ChatModelController.java
+++ b/01-chat-models/chat-models-ollama/src/main/java/com/thomasvitale/ai/spring/model/ChatModelController.java
@@ -31,7 +31,7 @@ String chat(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?")
     @GetMapping("/chat/generic-options")
     String chatWithGenericOptions(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?") String question) {
         return chatModel.call(new Prompt(question, ChatOptionsBuilder.builder()
-                .withTemperature(0.9f)
+                .withTemperature(0.9)
                 .build()))
             .getResult().getOutput().getContent();
     }
@@ -40,7 +40,7 @@ String chatWithGenericOptions(@RequestParam(defaultValue = "What did Gandalf say
     String chatWithProviderOptions(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?") String question) {
         return chatModel.call(new Prompt(question, OllamaOptions.create()
                 .withModel("mistral")
-                .withRepeatPenalty(1.5f)))
+                .withRepeatPenalty(1.5)))
             .getResult().getOutput().getContent();
     }
diff --git a/01-chat-models/chat-models-openai/src/main/java/com/thomasvitale/ai/spring/ChatController.java b/01-chat-models/chat-models-openai/src/main/java/com/thomasvitale/ai/spring/ChatController.java
index 889e646..711f0dc 100644
--- a/01-chat-models/chat-models-openai/src/main/java/com/thomasvitale/ai/spring/ChatController.java
+++ b/01-chat-models/chat-models-openai/src/main/java/com/thomasvitale/ai/spring/ChatController.java
@@ -33,7 +33,7 @@ String chatWithGenericOptions(@RequestParam(defaultValue = "What did Gandalf say
         return chatClient.prompt()
                 .user(question)
                 .options(ChatOptionsBuilder.builder()
-                        .withTemperature(0.9f)
+                        .withTemperature(0.9)
                         .build())
                 .call()
                 .content();
@@ -45,7 +45,7 @@ String chatWithProviderOptions(@RequestParam(defaultValue = "What did Gandalf sa
                 .user(question)
                 .options(OpenAiChatOptions.builder()
                         .withModel("gpt-4o-mini")
-                        .withTemperature(0.9f)
+                        .withTemperature(0.9)
                         .withUser("jon.snow")
                         .build())
                 .call()
diff --git a/01-chat-models/chat-models-openai/src/main/java/com/thomasvitale/ai/spring/model/ChatModelController.java b/01-chat-models/chat-models-openai/src/main/java/com/thomasvitale/ai/spring/model/ChatModelController.java
index 063eb7b..ae5c6ed 100644
--- a/01-chat-models/chat-models-openai/src/main/java/com/thomasvitale/ai/spring/model/ChatModelController.java
+++ b/01-chat-models/chat-models-openai/src/main/java/com/thomasvitale/ai/spring/model/ChatModelController.java
@@ -31,7 +31,7 @@ String chat(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?")
     @GetMapping("/chat/generic-options")
     String chatWithGenericOptions(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?") String question) {
         return chatModel.call(new Prompt(question, ChatOptionsBuilder.builder()
-                .withTemperature(0.9f)
+                .withTemperature(0.9)
                 .build()))
             .getResult().getOutput().getContent();
     }
@@ -40,7 +40,7 @@ String chatWithGenericOptions(@RequestParam(defaultValue = "What did Gandalf say
     String chatWithProviderOptions(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?") String question) {
         return chatModel.call(new Prompt(question, OpenAiChatOptions.builder()
                 .withModel("gpt-4o-mini")
-                .withTemperature(0.9f)
+                .withTemperature(0.9)
                 .withUser("jon.snow")
                 .build()))
             .getResult().getOutput().getContent();
diff --git a/14-observability/observability-models-mistral-ai/src/main/java/com/thomasvitale/ai/spring/ChatController.java b/14-observability/observability-models-mistral-ai/src/main/java/com/thomasvitale/ai/spring/ChatController.java
index f30ebc8..483b481 100644
--- a/14-observability/observability-models-mistral-ai/src/main/java/com/thomasvitale/ai/spring/ChatController.java
+++ b/14-observability/observability-models-mistral-ai/src/main/java/com/thomasvitale/ai/spring/ChatController.java
@@ -33,7 +33,7 @@ String chat(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?")
     @GetMapping("/chat/generic-options")
     String chatWithGenericOptions(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?") String message) {
         return chatModel.call(new Prompt(message, ChatOptionsBuilder.builder()
-                .withTemperature(1.3f)
+                .withTemperature(1.3)
                 .build()))
             .getResult().getOutput().getContent();
     }
@@ -43,8 +43,8 @@ String chatWithMistralAiOptions(@RequestParam(defaultValue = "What did Gandalf s
         return chatModel.call(new Prompt(message, MistralAiChatOptions.builder()
                 .withMaxTokens(1500)
                 .withStop(List.of("this-is-the-end", "addio"))
-                .withTemperature(0.7f)
-                .withTopP(0.1f)
+                .withTemperature(0.7)
+                .withTopP(0.1)
                 .build()))
             .getResult().getOutput().getContent();
     }
@@ -53,7 +53,7 @@ String chatWithMistralAiOptions(@RequestParam(defaultValue = "What did Gandalf s
     String chatWithFunctions(@RequestParam(defaultValue = "Philip Pullman") String author) {
         return chatModel.call(new Prompt("What books written by %s are available to read and what is their bestseller?".formatted(author),
                 MistralAiChatOptions.builder()
-                        .withTemperature(0.3f)
+                        .withTemperature(0.3)
                         .withFunctions(Set.of("booksByAuthor", "bestsellerBookByAuthor"))
                         .build()))
             .getResult().getOutput().getContent();
diff --git a/14-observability/observability-models-ollama/src/main/java/com/thomasvitale/ai/spring/ChatController.java b/14-observability/observability-models-ollama/src/main/java/com/thomasvitale/ai/spring/ChatController.java
index f62c513..f35661e 100644
--- a/14-observability/observability-models-ollama/src/main/java/com/thomasvitale/ai/spring/ChatController.java
+++ b/14-observability/observability-models-ollama/src/main/java/com/thomasvitale/ai/spring/ChatController.java
@@ -33,7 +33,7 @@ String chat(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?")
     @GetMapping("/chat/generic-options")
     String chatWithGenericOptions(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?") String message) {
         return chatModel.call(new Prompt(message, ChatOptionsBuilder.builder()
-                .withTemperature(1.3f)
+                .withTemperature(1.3)
                 .build()))
             .getResult().getOutput().getContent();
     }
@@ -41,13 +41,13 @@ String chatWithGenericOptions(@RequestParam(defaultValue = "What did Gandalf say
     @GetMapping("/chat/ollama-options")
     String chatWithOllamaOptions(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?") String message) {
         return chatModel.call(new Prompt(message, OllamaOptions.builder()
-                .withFrequencyPenalty(1.3f)
+                .withFrequencyPenalty(1.3)
                 .withNumPredict(1500)
-                .withPresencePenalty(1.0f)
+                .withPresencePenalty(1.0)
                 .withStop(List.of("this-is-the-end", "addio"))
-                .withTemperature(0.7f)
+                .withTemperature(0.7)
                 .withTopK(1)
-                .withTopP(0f)
+                .withTopP(0.0)
                 .build()))
             .getResult().getOutput().getContent();
     }
@@ -56,7 +56,7 @@ String chatWithOllamaOptions(@RequestParam(defaultValue = "What did Gandalf say
     String chatWithFunctions(@RequestParam(defaultValue = "Philip Pullman") String author) {
         return chatModel.call(new Prompt("What books written by %s are available to read and what is their bestseller?".formatted(author),
                 OllamaOptions.builder()
-                        .withTemperature(0.3f)
+                        .withTemperature(0.3)
                         .withFunctions(Set.of("booksByAuthor", "bestsellerBookByAuthor"))
                         .build()))
             .getResult().getOutput().getContent();
diff --git a/14-observability/observability-models-openai/src/main/java/com/thomasvitale/ai/spring/ChatController.java b/14-observability/observability-models-openai/src/main/java/com/thomasvitale/ai/spring/ChatController.java
index 8e36054..978db03 100644
--- a/14-observability/observability-models-openai/src/main/java/com/thomasvitale/ai/spring/ChatController.java
+++ b/14-observability/observability-models-openai/src/main/java/com/thomasvitale/ai/spring/ChatController.java
@@ -33,7 +33,7 @@ String chat(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?")
     @GetMapping("/chat/generic-options")
     String chatWithGenericOptions(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?") String message) {
         return chatModel.call(new Prompt(message, ChatOptionsBuilder.builder()
-                .withTemperature(1.3f)
+                .withTemperature(1.3)
                 .build()))
             .getResult().getOutput().getContent();
     }
@@ -41,12 +41,12 @@ String chatWithGenericOptions(@RequestParam(defaultValue = "What did Gandalf say
     @GetMapping("/chat/openai-options")
     String chatWithOpenAiOptions(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?") String message) {
         return chatModel.call(new Prompt(message, OpenAiChatOptions.builder()
-                .withFrequencyPenalty(1.3f)
+                .withFrequencyPenalty(1.3)
                 .withMaxTokens(1500)
-                .withPresencePenalty(1.0f)
+                .withPresencePenalty(1.0)
                 .withStop(List.of("this-is-the-end", "addio"))
-                .withTemperature(0.7f)
-                .withTopP(0f)
+                .withTemperature(0.7)
+                .withTopP(0.0)
                 .withUser("jon.snow")
                 .build()))
             .getResult().getOutput().getContent();
@@ -56,7 +56,7 @@ String chatWithOpenAiOptions(@RequestParam(defaultValue = "What did Gandalf say
     String chatWithFunctions(@RequestParam(defaultValue = "Philip Pullman") String author) {
         return chatModel.call(new Prompt("What books written by %s are available to read and what is their bestseller?".formatted(author),
                 OpenAiChatOptions.builder()
-                        .withTemperature(0.3f)
+                        .withTemperature(0.3)
                         .withFunctions(Set.of("booksByAuthor", "bestsellerBookByAuthor"))
                         .build()))
             .getResult().getOutput().getContent();
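
The change is mechanical throughout the patch: every option setter that previously took a Float literal (temperature, top-p, repeat/frequency/presence penalties) now takes a Double, so each call site only drops the float suffix. A minimal before/after sketch of one such call site, using the ChatOptionsBuilder API that appears in the hunks above (surrounding class, imports, and variable names are assumed, not part of the patch):

    // Before the migration: option values passed as Float literals
    ChatOptions before = ChatOptionsBuilder.builder()
            .withTemperature(0.9f)
            .build();

    // After the migration: the same option passed as a Double literal
    ChatOptions after = ChatOptionsBuilder.builder()
            .withTemperature(0.9)
            .build();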