diff --git a/app/client/platforms/alibaba.ts b/app/client/platforms/alibaba.ts
index 723ba774b8e..d5fa3042fc1 100644
--- a/app/client/platforms/alibaba.ts
+++ b/app/client/platforms/alibaba.ts
@@ -21,7 +21,7 @@ import {
 } from "@fortaine/fetch-event-source";
 import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
-import { getMessageTextContent, isVisionModel } from "@/app/utils";
+import { getMessageTextContent } from "@/app/utils";
 
 export interface OpenAIListModelResponse {
   object: string;
diff --git a/app/client/platforms/anthropic.ts b/app/client/platforms/anthropic.ts
index bf8faf83763..b079ba1ada2 100644
--- a/app/client/platforms/anthropic.ts
+++ b/app/client/platforms/anthropic.ts
@@ -3,7 +3,6 @@ import { ChatOptions, getHeaders, LLMApi, MultimodalContent } from "../api";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
 import { getClientConfig } from "@/app/config/client";
 import { DEFAULT_API_HOST } from "@/app/constant";
-import { RequestMessage } from "@/app/typing";
 import {
   EventStreamContentType,
   fetchEventSource,
@@ -12,6 +11,7 @@ import {
 import Locale from "../../locales";
 import { prettyObject } from "@/app/utils/format";
 import { getMessageTextContent, isVisionModel } from "@/app/utils";
+import { preProcessImageContent } from "@/app/utils/chat";
 import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
 
 export type MultiBlockContent = {
@@ -93,7 +93,12 @@ export class ClaudeApi implements LLMApi {
       },
     };
 
-    const messages = [...options.messages];
+    // try get base64image from local cache image_url
+    const messages: ChatOptions["messages"] = [];
+    for (const v of options.messages) {
+      const content = await preProcessImageContent(v.content);
+      messages.push({ role: v.role, content });
+    }
 
     const keys = ["system", "user"];
diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts
index 8acde1a83f1..753a768c6c9 100644
--- a/app/client/platforms/google.ts
+++ b/app/client/platforms/google.ts
@@ -14,6 +14,7 @@ import {
   getMessageImages,
   isVisionModel,
 } from "@/app/utils";
+import { preProcessImageContent } from "@/app/utils/chat";
 
 export class GeminiProApi implements LLMApi {
   path(path: string): string {
@@ -56,7 +57,14 @@
   async chat(options: ChatOptions): Promise<void> {
     const apiClient = this;
     let multimodal = false;
-    const messages = options.messages.map((v) => {
+
+    // try get base64image from local cache image_url
+    const _messages: ChatOptions["messages"] = [];
+    for (const v of options.messages) {
+      const content = await preProcessImageContent(v.content);
+      _messages.push({ role: v.role, content });
+    }
+    const messages = _messages.map((v) => {
       let parts: any[] = [{ text: getMessageTextContent(v) }];
       if (isVisionModel(options.config.model)) {
         const images = getMessageImages(v);
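// Illustrative sketch, not part of the patch: how the provider clients above are
// expected to use preProcessImageContent. Types come from app/client/api.ts; the
// cache URL below is a made-up example.
import { RequestMessage } from "@/app/client/api";
import { preProcessImageContent } from "@/app/utils/chat";

async function toProviderMessage(
  role: RequestMessage["role"],
  content: RequestMessage["content"],
) {
  // String content passes through unchanged; for multimodal parts, any
  // /api/cache/... image_url is swapped back to a base64 data URL so the
  // upstream API never sees a local cache address.
  return { role, content: await preProcessImageContent(content) };
}

toProviderMessage("user", [
  { type: "image_url", image_url: { url: "/api/cache/example.png" } },
]).then((m) => console.log(m));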
diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 98851c224c1..680125fe6c4 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -11,6 +11,7 @@
 } from "@/app/constant";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
 import { collectModelsWithDefaultModel } from "@/app/utils/model";
+import { preProcessImageContent } from "@/app/utils/chat";
 import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
 
 import {
@@ -105,10 +106,13 @@ export class ChatGPTApi implements LLMApi {
   async chat(options: ChatOptions) {
     const visionModel = isVisionModel(options.config.model);
 
-    const messages = options.messages.map((v) => ({
-      role: v.role,
-      content: visionModel ? v.content : getMessageTextContent(v),
-    }));
+    const messages: ChatOptions["messages"] = [];
+    for (const v of options.messages) {
+      const content = visionModel
+        ? await preProcessImageContent(v.content)
+        : getMessageTextContent(v);
+      messages.push({ role: v.role, content });
+    }
 
     const modelConfig = {
       ...useAppConfig.getState().modelConfig,
diff --git a/app/components/chat.tsx b/app/components/chat.tsx
index f5e6788fa8b..08bcd04fd56 100644
--- a/app/components/chat.tsx
+++ b/app/components/chat.tsx
@@ -61,7 +61,7 @@ import {
   isVisionModel,
 } from "../utils";
 
-import { compressImage } from "@/app/utils/chat";
+import { uploadImage as uploadImageRemote } from "@/app/utils/chat";
 
 import dynamic from "next/dynamic";
@@ -1167,7 +1167,7 @@ function _Chat() {
           ...(await new Promise<string[]>((res, rej) => {
             setUploading(true);
             const imagesData: string[] = [];
-            compressImage(file, 256 * 1024)
+            uploadImageRemote(file)
               .then((dataUrl) => {
                 imagesData.push(dataUrl);
                 setUploading(false);
@@ -1209,7 +1209,7 @@ function _Chat() {
       const imagesData: string[] = [];
       for (let i = 0; i < files.length; i++) {
         const file = event.target.files[i];
-        compressImage(file, 256 * 1024)
+        uploadImageRemote(file)
           .then((dataUrl) => {
             imagesData.push(dataUrl);
             if (
diff --git a/app/constant.ts b/app/constant.ts
index ffbcca53f5f..f5d3c77f463 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -21,6 +21,9 @@ export const BYTEDANCE_BASE_URL = "https://ark.cn-beijing.volces.com";
 
 export const ALIBABA_BASE_URL = "https://dashscope.aliyuncs.com/api/";
 
+export const CACHE_URL_PREFIX = "/api/cache";
+export const UPLOAD_URL = `${CACHE_URL_PREFIX}/upload`;
+
 export enum Path {
   Home = "/",
   Chat = "/chat",
@@ -239,7 +242,7 @@ const baiduModels = [
   "ernie-speed-128k",
   "ernie-speed-8k",
   "ernie-lite-8k",
-  "ernie-tiny-8k"
+  "ernie-tiny-8k",
 ];
 
 const bytedanceModels = [
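// Illustrative sketch, not part of the patch: the call pattern chat.tsx now uses
// for attachments. setAttachImages stands in for the real component state setter.
import { uploadImage as uploadImageRemote } from "@/app/utils/chat";

async function attachFile(
  file: File,
  setAttachImages: (urls: string[]) => void,
) {
  // With the service worker active this resolves to the /api/cache/<id>.<ext>
  // URL returned by the POST to UPLOAD_URL; without it, uploadImage falls back
  // to the old compressImage path and returns a base64 data URL instead.
  const url = await uploadImageRemote(file);
  setAttachImages([url]);
}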
diff --git a/app/utils/chat.ts b/app/utils/chat.ts
index 991d06b7320..24f37d1068d 100644
--- a/app/utils/chat.ts
+++ b/app/utils/chat.ts
@@ -1,6 +1,7 @@
-import heic2any from "heic2any";
+import { CACHE_URL_PREFIX, UPLOAD_URL } from "@/app/constant";
+import { RequestMessage } from "@/app/client/api";
 
-export function compressImage(file: File, maxSize: number): Promise<string> {
+export function compressImage(file: Blob, maxSize: number): Promise<string> {
   return new Promise((resolve, reject) => {
     const reader = new FileReader();
     reader.onload = (readerEvent: any) => {
@@ -40,15 +41,104 @@
     reader.onerror = reject;
 
     if (file.type.includes("heic")) {
-      heic2any({ blob: file, toType: "image/jpeg" })
-        .then((blob) => {
-          reader.readAsDataURL(blob as Blob);
-        })
-        .catch((e) => {
-          reject(e);
-        });
+      try {
+        const heic2any = require("heic2any");
+        heic2any({ blob: file, toType: "image/jpeg" })
+          .then((blob: Blob) => {
+            reader.readAsDataURL(blob);
+          })
+          .catch((e: any) => {
+            reject(e);
+          });
+      } catch (e) {
+        reject(e);
+      }
     }
 
     reader.readAsDataURL(file);
   });
 }
+
+export async function preProcessImageContent(
+  content: RequestMessage["content"],
+) {
+  if (typeof content === "string") {
+    return content;
+  }
+  const result = [];
+  for (const part of content) {
+    if (part?.type == "image_url" && part?.image_url?.url) {
+      try {
+        const url = await cacheImageToBase64Image(part?.image_url?.url);
+        result.push({ type: part.type, image_url: { url } });
+      } catch (error) {
+        console.error("Error processing image URL:", error);
+      }
+    } else {
+      result.push({ ...part });
+    }
+  }
+  return result;
+}
+
+const imageCaches: Record<string, string> = {};
+export function cacheImageToBase64Image(imageUrl: string) {
+  if (imageUrl.includes(CACHE_URL_PREFIX)) {
+    if (!imageCaches[imageUrl]) {
+      const reader = new FileReader();
+      return fetch(imageUrl, {
+        method: "GET",
+        mode: "cors",
+        credentials: "include",
+      })
+        .then((res) => res.blob())
+        .then(
+          async (blob) =>
+            (imageCaches[imageUrl] = await compressImage(blob, 256 * 1024)),
+        ); // compressImage
+    }
+    return Promise.resolve(imageCaches[imageUrl]);
+  }
+  return Promise.resolve(imageUrl);
+}
+
+export function base64Image2Blob(base64Data: string, contentType: string) {
+  const byteCharacters = atob(base64Data);
+  const byteNumbers = new Array(byteCharacters.length);
+  for (let i = 0; i < byteCharacters.length; i++) {
+    byteNumbers[i] = byteCharacters.charCodeAt(i);
+  }
+  const byteArray = new Uint8Array(byteNumbers);
+  return new Blob([byteArray], { type: contentType });
+}
+
+export function uploadImage(file: File): Promise<string> {
+  if (!window._SW_ENABLED) {
+    // if serviceWorker register error, using compressImage
+    return compressImage(file, 256 * 1024);
+  }
+  const body = new FormData();
+  body.append("file", file);
+  return fetch(UPLOAD_URL, {
+    method: "post",
+    body,
+    mode: "cors",
+    credentials: "include",
+  })
+    .then((res) => res.json())
+    .then((res) => {
+      console.log("res", res);
+      if (res?.code == 0 && res?.data) {
+        return res?.data;
+      }
+      throw Error(`upload Error: ${res?.msg}`);
+    });
+}
+
+export function removeImage(imageUrl: string) {
+  return fetch(imageUrl, {
+    method: "DELETE",
+    mode: "cors",
+    credentials: "include",
+  });
+}
diff --git a/public/serviceWorker.js b/public/serviceWorker.js
index f5a24b70176..c58b2cc5aba 100644
--- a/public/serviceWorker.js
+++ b/public/serviceWorker.js
@@ -1,10 +1,13 @@
 const CHATGPT_NEXT_WEB_CACHE = "chatgpt-next-web-cache";
+const CHATGPT_NEXT_WEB_FILE_CACHE = "chatgpt-next-web-file";
+let a="useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict";let nanoid=(e=21)=>{let t="",r=crypto.getRandomValues(new Uint8Array(e));for(let n=0;n<e;n++)t+=a[63&r[n]];return t};
 
 self.addEventListener("activate", function (event) {
   console.log("ServiceWorker activated.");
 });
 
 self.addEventListener("install", function (event) {
   event.waitUntil(
     caches.open(CHATGPT_NEXT_WEB_CACHE).then(function (cache) {
       return cache.addAll([]);
     }),
   );
 });
 
+async function upload(request, url) {
+  const formData = await request.formData()
+  const file = formData.getAll('file')[0]
+  let ext = file.name.split('.').pop()
+  if (ext === 'blob') {
+    ext = file.type.split('/').pop()
+  }
+  const fileUrl = `${url.origin}/api/cache/${nanoid()}.${ext}`
+  // console.debug('file', file, fileUrl, request)
+  const cache = await caches.open(CHATGPT_NEXT_WEB_FILE_CACHE)
+  await cache.put(new Request(fileUrl), new Response(file, {
+    headers: {
+      'content-type': file.type,
+      'content-length': file.size,
+      'cache-control': 'no-cache', // file already store in disk
+      'server': 'ServiceWorker',
+    }
+  }))
+  return Response.json({ code: 0, data: fileUrl })
+}
+
+async function remove(request, url) {
+  const cache = await caches.open(CHATGPT_NEXT_WEB_FILE_CACHE)
+  const res = await cache.delete(request.url)
+  return Response.json({ code: 0 })
+}
+
+self.addEventListener("fetch", (e) => {
+  const url = new URL(e.request.url);
+  if (/^\/api\/cache/.test(url.pathname)) {
+    if ('GET' == e.request.method) {
+      e.respondWith(caches.match(e.request))
+    }
+    if ('POST' == e.request.method) {
+      e.respondWith(upload(e.request, url))
+    }
+    if ('DELETE' == e.request.method) {
+      e.respondWith(remove(e.request, url))
+    }
+  }
+});
+
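// Illustrative sketch, not part of the patch: the round trip the /api/cache
// routes above are meant to support from page context. UPLOAD_URL comes from
// app/constant.ts; error handling is omitted.
import { UPLOAD_URL } from "@/app/constant";

async function cacheRoundTrip(file: File) {
  // POST /api/cache/upload: the service worker stores the blob in the
  // chatgpt-next-web-file cache and answers { code: 0, data: <file URL> }.
  const body = new FormData();
  body.append("file", file);
  const res = await fetch(UPLOAD_URL, { method: "POST", body });
  const { data: fileUrl } = await res.json();

  // GET is served straight from CacheStorage; DELETE evicts the entry again.
  const blob = await (await fetch(fileUrl)).blob();
  await fetch(fileUrl, { method: "DELETE" });
  return blob;
}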
diff --git a/public/serviceWorkerRegister.js b/public/serviceWorkerRegister.js
index 8405f21aaab..737205bb85f 100644
--- a/public/serviceWorkerRegister.js
+++ b/public/serviceWorkerRegister.js
@@ -1,9 +1,27 @@
 if ('serviceWorker' in navigator) {
-  window.addEventListener('load', function () {
+  window.addEventListener('DOMContentLoaded', function () {
     navigator.serviceWorker.register('/serviceWorker.js').then(function (registration) {
       console.log('ServiceWorker registration successful with scope: ', registration.scope);
+      const sw = registration.installing || registration.waiting
+      if (sw) {
+        sw.onstatechange = function() {
+          if (sw.state === 'installed') {
+            // SW installed. Reload for SW intercept serving SW-enabled page.
+            console.log('ServiceWorker installed reload page');
+            window.location.reload();
+          }
+        }
+      }
+      registration.update().then(res => {
+        console.log('ServiceWorker registration update: ', res);
+      });
+      window._SW_ENABLED = true
     }, function (err) {
       console.error('ServiceWorker registration failed: ', err);
     });
+    navigator.serviceWorker.addEventListener('controllerchange', function() {
+      console.log('ServiceWorker controllerchange ');
+      window.location.reload(true);
+    });
   });
-}
\ No newline at end of file
+}
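// Illustrative sketch, not part of the patch: uploadImage in app/utils/chat.ts
// reads window._SW_ENABLED, which this register script sets from plain JS, so
// the TypeScript side needs a global declaration along these lines if the repo
// does not already provide one.
declare global {
  interface Window {
    _SW_ENABLED?: boolean;
  }
}

export {};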