From 0e0572b0f700b7c0857aa81c3b19befd433a0a8a Mon Sep 17 00:00:00 2001 From: DreamsCat <52805134+DreamsCat@users.noreply.github.com> Date: Tue, 27 Feb 2024 15:59:35 +0800 Subject: [PATCH] Update openai.ts uncomment max_tokens config --- app/client/platforms/openai.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts index 919716bfb0a..933fafb0e4b 100644 --- a/app/client/platforms/openai.ts +++ b/app/client/platforms/openai.ts @@ -106,7 +106,7 @@ export class ChatGPTApi implements LLMApi { presence_penalty: modelConfig.presence_penalty, frequency_penalty: modelConfig.frequency_penalty, top_p: modelConfig.top_p, - // max_tokens: Math.max(modelConfig.max_tokens, 1024), + max_tokens: Math.max(modelConfig.max_tokens, 1024), // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.