From dd4648ed9a803568b839e2510ca01cf7f1c6f740 Mon Sep 17 00:00:00 2001
From: "l.tingting" <l.tingting@pku.edu.cn>
Date: Wed, 24 Apr 2024 22:59:14 +0800
Subject: [PATCH] remove max_tokens for the official gpt-4-turbo model

Only force max_tokens for preview vision models (model names containing
"preview"); the official gpt-4-turbo model does not need this workaround.
---
 app/client/platforms/openai.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index ca8bc2ebe6f..f3599263023 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -129,7 +129,7 @@ export class ChatGPTApi implements LLMApi {
     };
 
     // add max_tokens to vision model
-    if (visionModel) {
+    if (visionModel && modelConfig.model.includes("preview")) {
       requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
     }
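
For reference, here is a minimal TypeScript sketch of the behavior after this
patch. The helper name, the ModelConfig shape, and the example model names are
illustrative assumptions, not code taken from the repository:

    interface ModelConfig {
      model: string;
      max_tokens: number;
    }

    // Returns the max_tokens value to place in the request payload, or
    // undefined when the field should be omitted entirely.
    function maxTokensForRequest(
      cfg: ModelConfig,
      isVisionModel: boolean,
    ): number | undefined {
      // After this patch, max_tokens is only forced for preview vision
      // models; the official gpt-4-turbo no longer receives it.
      if (isVisionModel && cfg.model.includes("preview")) {
        return Math.max(cfg.max_tokens, 4000);
      }
      return undefined;
    }

    // Example calls (assumed model names):
    maxTokensForRequest({ model: "gpt-4-vision-preview", max_tokens: 1000 }, true); // 4000
    maxTokensForRequest({ model: "gpt-4-turbo", max_tokens: 1000 }, true);          // undefined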