diff --git a/aigenpipeline-framework/src/main/java/net/stoerr/ai/aigenpipeline/framework/chat/OpenAIChatBuilderImpl.java b/aigenpipeline-framework/src/main/java/net/stoerr/ai/aigenpipeline/framework/chat/OpenAIChatBuilderImpl.java
index 853196c..16e1c70 100644
--- a/aigenpipeline-framework/src/main/java/net/stoerr/ai/aigenpipeline/framework/chat/OpenAIChatBuilderImpl.java
+++ b/aigenpipeline-framework/src/main/java/net/stoerr/ai/aigenpipeline/framework/chat/OpenAIChatBuilderImpl.java
@@ -49,7 +49,7 @@ public class OpenAIChatBuilderImpl implements AIChatBuilder {
      */
     public static final String ANTHROPIC_DEFAULT_VERSION = "2023-06-01";
 
-    public static final int DEFAULT_MAX_TOKENS = 2048;
+    public static final int DEFAULT_MAX_TOKENS = 10240;
 
     protected static final Gson gson = new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create();
     public static final String ROLE_SYSTEM = "system";
diff --git a/aigenpipeline-framework/src/test/java/net/stoerr/ai/aigenpipeline/framework/chat/RunListLocalModel.java b/aigenpipeline-framework/src/test/java/net/stoerr/ai/aigenpipeline/framework/chat/RunListLocalModel.java
index 0529311..c8a60d5 100644
--- a/aigenpipeline-framework/src/test/java/net/stoerr/ai/aigenpipeline/framework/chat/RunListLocalModel.java
+++ b/aigenpipeline-framework/src/test/java/net/stoerr/ai/aigenpipeline/framework/chat/RunListLocalModel.java
@@ -29,7 +29,7 @@ public static void main(String[] args) throws IOException, InterruptedException
                 "    }\n" +
                 "  ],\n" +
                 "  \"temperature\": 0.0,\n" +
-                "  \"max_completion_tokens\": 2048\n" +
+                "  \"max_completion_tokens\": 10240\n" +
                 "}";
         HttpRequest request = HttpRequest.newBuilder()
                 .uri(URI.create(url))
diff --git a/aigenpipeline-framework/src/test/resources/aigenpipeline-test/expected/outputWithReplacement.txt b/aigenpipeline-framework/src/test/resources/aigenpipeline-test/expected/outputWithReplacement.txt
index c5b8074..1140df5 100644
--- a/aigenpipeline-framework/src/test/resources/aigenpipeline-test/expected/outputWithReplacement.txt
+++ b/aigenpipeline-framework/src/test/resources/aigenpipeline-test/expected/outputWithReplacement.txt
@@ -33,7 +33,7 @@ Response to:
     }
   ],
   "temperature": 0.0,
-  "max_completion_tokens": 2048
+  "max_completion_tokens": 10240
 }
 
 this should be left alone at the end.
diff --git a/aigenpipeline-framework/src/test/resources/aigenpipeline-test/expected/outputWithVersion.txt b/aigenpipeline-framework/src/test/resources/aigenpipeline-test/expected/outputWithVersion.txt
index caf13cd..3b8bf00 100644
--- a/aigenpipeline-framework/src/test/resources/aigenpipeline-test/expected/outputWithVersion.txt
+++ b/aigenpipeline-framework/src/test/resources/aigenpipeline-test/expected/outputWithVersion.txt
@@ -30,5 +30,5 @@ Response to:
     }
   ],
   "temperature": 0.0,
-  "max_completion_tokens": 2048
+  "max_completion_tokens": 10240
 }
diff --git a/examples/infileprompt/copydata.md b/examples/infileprompt/copydata.md
index 61751b3..109c647 100644
--- a/examples/infileprompt/copydata.md
+++ b/examples/infileprompt/copydata.md
@@ -39,5 +39,5 @@ AIGenPromptEnd(openaijson) -->
     }
   ],
   "temperature": 0.0,
-  "max_completion_tokens": 2048
+  "max_completion_tokens": 10240
 }