diff --git a/app/api/openai.ts b/app/api/openai.ts
index bbba69e569c..2b5deca8be3 100644
--- a/app/api/openai.ts
+++ b/app/api/openai.ts
@@ -14,7 +14,7 @@ function getModels(remoteModelRes: OpenAIListModelResponse) {
if (config.disableGPT4) {
remoteModelRes.data = remoteModelRes.data.filter(
(m) =>
- !(m.id.startsWith("gpt-4") || m.id.startsWith("chatgpt-4o")) ||
+ !(m.id.startsWith("gpt-4") || m.id.startsWith("chatgpt-4o") || m.id.startsWith("o1")) ||
m.id.startsWith("gpt-4o-mini"),
);
}
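
For reference, a standalone sketch (hypothetical model IDs, not the project's test data) of how the updated filter behaves when disableGPT4 is set: o1 models are now hidden alongside gpt-4 and chatgpt-4o, while gpt-4o-mini stays visible.

const ids = ["gpt-4o", "chatgpt-4o-latest", "o1-preview", "gpt-4o-mini", "gpt-3.5-turbo"];
const visible = ids.filter(
  (id) =>
    !(id.startsWith("gpt-4") || id.startsWith("chatgpt-4o") || id.startsWith("o1")) ||
    id.startsWith("gpt-4o-mini"),
);
// visible === ["gpt-4o-mini", "gpt-3.5-turbo"]; o1-preview is dropped with gpt-4o and chatgpt-4o-latest.
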
diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 7c1588440b2..15cfb7ca602 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -224,7 +224,7 @@ export class ChatGPTApi implements LLMApi {
- // O1 not support image, tools (plugin in ChatGPTNextWeb) and system, stream, logprobs, temperature, top_p, n, presence_penalty, frequency_penalty yet.
+ // O1 does not yet support image input, tools (plugins in ChatGPTNextWeb), system messages, logprobs, temperature, top_p, n, presence_penalty, or frequency_penalty.
requestPayload = {
messages,
- stream: !isO1 ? options.config.stream : false,
+ stream: options.config.stream,
model: modelConfig.model,
temperature: !isO1 ? modelConfig.temperature : 1,
presence_penalty: !isO1 ? modelConfig.presence_penalty : 0,
@@ -247,7 +247,7 @@ export class ChatGPTApi implements LLMApi {
console.log("[Request] openai payload: ", requestPayload);
- const shouldStream = !isDalle3 && !!options.config.stream && !isO1;
+ const shouldStream = !isDalle3 && !!options.config.stream;
const controller = new AbortController();
options.onController?.(controller);
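
A simplified sketch of the payload these two hunks now build for an o1 request; the helper and config names below are placeholders, not the client's real interfaces. The point of the change is that stream is passed straight through from the user's config (streaming is no longer forced off for o1), while temperature and the penalties stay pinned to the values o1 accepts.

// Hypothetical config shape; the real modelConfig has more fields.
interface Cfg { stream: boolean; temperature: number; presence_penalty: number }

function buildPayload(isO1: boolean, cfg: Cfg) {
  return {
    model: isO1 ? "o1-mini" : "gpt-4o",
    stream: cfg.stream,                                 // before this change: !isO1 ? cfg.stream : false
    temperature: !isO1 ? cfg.temperature : 1,           // o1 still only accepts the default temperature
    presence_penalty: !isO1 ? cfg.presence_penalty : 0, // pinned to 0 for o1
  };
}
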
diff --git a/app/components/chat.tsx b/app/components/chat.tsx
index fb3b0e55702..61328e309b9 100644
--- a/app/components/chat.tsx
+++ b/app/components/chat.tsx
@@ -1664,8 +1664,7 @@ function _Chat() {
};
useEffect(() => {
fetchData();
- });
-
+ }, []);
return (
<>
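
A minimal sketch of what the added dependency array changes; the component and fetchData here are placeholders. Without a second argument, the effect re-runs after every render, so fetchData fired repeatedly; the empty array limits it to a single run after the initial render.

import { useEffect } from "react";

function Example(props: { fetchData: () => void }) {
  // Before: useEffect(() => { props.fetchData(); });  // runs after every render
  useEffect(() => {
    props.fetchData();
  }, []); // runs once, after the first render
  return null;
}
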
diff --git a/app/components/emoji.tsx b/app/components/emoji.tsx
index 6db746c462c..d75cdda9268 100644
--- a/app/components/emoji.tsx
+++ b/app/components/emoji.tsx
@@ -37,7 +37,8 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) {
    return (
      <div className="no-dark">
        {props.model?.startsWith("gpt-4") ||
-       props.model?.startsWith("chatgpt-4o") ? (
+       props.model?.startsWith("chatgpt-4o") ||
+       props.model?.startsWith("o1") ? (
          <BlackBotIcon className="user-avatar" />
        ) : (
          <BotIcon className="user-avatar" />
diff --git a/app/components/settings.tsx b/app/components/settings.tsx
index ddbda1b730a..a74ff17b1f5 100644
--- a/app/components/settings.tsx
+++ b/app/components/settings.tsx
@@ -1771,9 +1771,11 @@ export function Settings() {
{
if (customModels) customModels += ",";
customModels += DEFAULT_MODELS.filter(
(m) =>
- (m.name.startsWith("gpt-4") || m.name.startsWith("chatgpt-4o")) &&
+ (m.name.startsWith("gpt-4") || m.name.startsWith("chatgpt-4o") || m.name.startsWith("o1")) &&
!m.name.startsWith("gpt-4o-mini"),
)
.map((m) => "-" + m.name)
.join(",");
if (
(defaultModel.startsWith("gpt-4") ||
- defaultModel.startsWith("chatgpt-4o")) &&
+ defaultModel.startsWith("chatgpt-4o") ||
+ defaultModel.startsWith("o1")) &&
!defaultModel.startsWith("gpt-4o-mini")
)
defaultModel = "";
diff --git a/app/constant.ts b/app/constant.ts
index 84d1a32684a..fc7222bf566 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -264,6 +264,7 @@ export const KnowledgeCutOffDate: Record<string, string> = {
"gpt-4o": "2023-10",
"gpt-4o-2024-05-13": "2023-10",
"gpt-4o-2024-08-06": "2023-10",
+ "gpt-4o-2024-11-20": "2023-10",
"chatgpt-4o-latest": "2023-10",
"gpt-4o-mini": "2023-10",
"gpt-4o-mini-2024-07-18": "2023-10",
@@ -303,6 +304,7 @@ const openaiModels = [
"gpt-4o",
"gpt-4o-2024-05-13",
"gpt-4o-2024-08-06",
+ "gpt-4o-2024-11-20",
"chatgpt-4o-latest",
"gpt-4o-mini",
"gpt-4o-mini-2024-07-18",
@@ -318,6 +320,9 @@ const googleModels = [
"gemini-1.0-pro",
"gemini-1.5-pro-latest",
"gemini-1.5-flash-latest",
+ "gemini-exp-1114",
+ "gemini-exp-1121",
+ "learnlm-1.5-pro-experimental",
"gemini-pro-vision",
];
diff --git a/app/utils.ts b/app/utils.ts
index 51039e1d0c0..8901186a4ea 100644
--- a/app/utils.ts
+++ b/app/utils.ts
@@ -260,6 +260,8 @@ export function isVisionModel(model: string) {
"gpt-4o",
"claude-3",
"gemini-1.5",
+ "gemini-exp",
+ "learnlm",
"qwen-vl",
"qwen2-vl",
"gemini-2.0",