using cache storage store image data #5013 #5061

Merged · 7 commits · Jul 19, 2024
2 changes: 1 addition & 1 deletion app/client/platforms/alibaba.ts
@@ -21,7 +21,7 @@ import {
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent, isVisionModel } from "@/app/utils";
import { getMessageTextContent } from "@/app/utils";

export interface OpenAIListModelResponse {
object: string;
9 changes: 7 additions & 2 deletions app/client/platforms/anthropic.ts
@@ -3,7 +3,6 @@ import { ChatOptions, getHeaders, LLMApi, MultimodalContent } from "../api";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import { getClientConfig } from "@/app/config/client";
import { DEFAULT_API_HOST } from "@/app/constant";
import { RequestMessage } from "@/app/typing";
import {
EventStreamContentType,
fetchEventSource,
@@ -12,6 +11,7 @@ import {
import Locale from "../../locales";
import { prettyObject } from "@/app/utils/format";
import { getMessageTextContent, isVisionModel } from "@/app/utils";
import { preProcessImageContent } from "@/app/utils/chat";
import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";

export type MultiBlockContent = {
@@ -93,7 +93,12 @@ export class ClaudeApi implements LLMApi {
},
};

const messages = [...options.messages];
// try get base64image from local cache image_url
const messages: ChatOptions["messages"] = [];
for (const v of options.messages) {
const content = await preProcessImageContent(v.content);
messages.push({ role: v.role, content });
}

const keys = ["system", "user"];

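Note (sketch, not part of the diff): the Anthropic client above and the Google and OpenAI clients below all apply the same per-message rewrite before building a request. A minimal illustration of that shared pattern, assuming preProcessImageContent behaves as defined later in this PR in app/utils/chat.ts; the helper name rehydrateCachedImages is only illustrative.

import { preProcessImageContent } from "@/app/utils/chat";
import type { ChatOptions } from "@/app/client/api";

// Cached parts such as { type: "image_url", image_url: { url: "/api/cache/<id>.png" } }
// come back as base64 data URLs; plain string content passes through unchanged.
async function rehydrateCachedImages(
  messages: ChatOptions["messages"],
): Promise<ChatOptions["messages"]> {
  const out: ChatOptions["messages"] = [];
  for (const m of messages) {
    out.push({ role: m.role, content: await preProcessImageContent(m.content) });
  }
  return out;
}
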
10 changes: 9 additions & 1 deletion app/client/platforms/google.ts
@@ -14,6 +14,7 @@ import {
getMessageImages,
isVisionModel,
} from "@/app/utils";
import { preProcessImageContent } from "@/app/utils/chat";

export class GeminiProApi implements LLMApi {
path(path: string): string {
@@ -56,7 +57,14 @@
async chat(options: ChatOptions): Promise<void> {
const apiClient = this;
let multimodal = false;
const messages = options.messages.map((v) => {

// try get base64image from local cache image_url
const _messages: ChatOptions["messages"] = [];
for (const v of options.messages) {
const content = await preProcessImageContent(v.content);
_messages.push({ role: v.role, content });
}
const messages = _messages.map((v) => {
let parts: any[] = [{ text: getMessageTextContent(v) }];
if (isVisionModel(options.config.model)) {
const images = getMessageImages(v);
12 changes: 8 additions & 4 deletions app/client/platforms/openai.ts
@@ -11,6 +11,7 @@ import {
} from "@/app/constant";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import { collectModelsWithDefaultModel } from "@/app/utils/model";
import { preProcessImageContent } from "@/app/utils/chat";
import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";

import {
@@ -105,10 +106,13 @@ export class ChatGPTApi implements LLMApi {

async chat(options: ChatOptions) {
const visionModel = isVisionModel(options.config.model);
const messages = options.messages.map((v) => ({
role: v.role,
content: visionModel ? v.content : getMessageTextContent(v),
}));
const messages: ChatOptions["messages"] = [];
for (const v of options.messages) {
const content = visionModel
? await preProcessImageContent(v.content)
: getMessageTextContent(v);
messages.push({ role: v.role, content });
}

const modelConfig = {
...useAppConfig.getState().modelConfig,
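To make the data flow concrete: with this PR, chat history keeps only a short cache URL for each pasted image, and the base64 payload is rebuilt just before the provider request (for the OpenAI client, only when the model is a vision model). A rough before/after of a single multimodal part; the id and data URL are illustrative.

// Stored in chat history (small): the URL returned by uploadImage via the service worker.
const stored = {
  type: "image_url",
  image_url: { url: "/api/cache/abc123.png" }, // illustrative id
};

// What the request body carries after preProcessImageContent (base64 again):
const sent = {
  type: "image_url",
  image_url: { url: "data:image/jpeg;base64,/9j/4AAQ..." }, // truncated for illustration
};
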
6 changes: 3 additions & 3 deletions app/components/chat.tsx
@@ -61,7 +61,7 @@ import {
isVisionModel,
} from "../utils";

import { compressImage } from "@/app/utils/chat";
import { uploadImage as uploadImageRemote } from "@/app/utils/chat";

import dynamic from "next/dynamic";

@@ -1167,7 +1167,7 @@ function _Chat() {
...(await new Promise<string[]>((res, rej) => {
setUploading(true);
const imagesData: string[] = [];
compressImage(file, 256 * 1024)
uploadImageRemote(file)
.then((dataUrl) => {
imagesData.push(dataUrl);
setUploading(false);
@@ -1209,7 +1209,7 @@
const imagesData: string[] = [];
for (let i = 0; i < files.length; i++) {
const file = event.target.files[i];
compressImage(file, 256 * 1024)
uploadImageRemote(file)
.then((dataUrl) => {
imagesData.push(dataUrl);
if (
5 changes: 4 additions & 1 deletion app/constant.ts
@@ -21,6 +21,9 @@ export const BYTEDANCE_BASE_URL = "https://ark.cn-beijing.volces.com";

export const ALIBABA_BASE_URL = "https://dashscope.aliyuncs.com/api/";

export const CACHE_URL_PREFIX = "/api/cache";
export const UPLOAD_URL = `${CACHE_URL_PREFIX}/upload`;

export enum Path {
Home = "/",
Chat = "/chat",
@@ -239,7 +242,7 @@ const baiduModels = [
"ernie-speed-128k",
"ernie-speed-8k",
"ernie-lite-8k",
"ernie-tiny-8k"
"ernie-tiny-8k",
];

const bytedanceModels = [
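For reference, these constants are same-origin paths that the new service worker (added below) intercepts, so no Next.js API route has to exist for them. A small sketch of the resolved values and which verbs hit them:

import { CACHE_URL_PREFIX, UPLOAD_URL } from "@/app/constant";

console.log(CACHE_URL_PREFIX); // "/api/cache" (GET and DELETE prefix for cached files)
console.log(UPLOAD_URL);       // "/api/cache/upload" (POST target used by uploadImage)
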
108 changes: 99 additions & 9 deletions app/utils/chat.ts
@@ -1,6 +1,7 @@
import heic2any from "heic2any";
import { CACHE_URL_PREFIX, UPLOAD_URL } from "@/app/constant";
import { RequestMessage } from "@/app/client/api";

export function compressImage(file: File, maxSize: number): Promise<string> {
export function compressImage(file: Blob, maxSize: number): Promise<string> {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = (readerEvent: any) => {
@@ -40,15 +41,104 @@ export function compressImage(file: File, maxSize: number): Promise<string> {
reader.onerror = reject;

if (file.type.includes("heic")) {
heic2any({ blob: file, toType: "image/jpeg" })
.then((blob) => {
reader.readAsDataURL(blob as Blob);
})
.catch((e) => {
reject(e);
});
try {
const heic2any = require("heic2any");
heic2any({ blob: file, toType: "image/jpeg" })
.then((blob: Blob) => {
reader.readAsDataURL(blob);
})
.catch((e: any) => {
reject(e);
});
} catch (e) {
reject(e);
}
}

reader.readAsDataURL(file);
});
}

export async function preProcessImageContent(
content: RequestMessage["content"],
) {
if (typeof content === "string") {
return content;
}
const result = [];
for (const part of content) {
if (part?.type == "image_url" && part?.image_url?.url) {
try {
const url = await cacheImageToBase64Image(part?.image_url?.url);
result.push({ type: part.type, image_url: { url } });
} catch (error) {
console.error("Error processing image URL:", error);
}
} else {
result.push({ ...part });
}
}
return result;
}

const imageCaches: Record<string, string> = {};
export function cacheImageToBase64Image(imageUrl: string) {
if (imageUrl.includes(CACHE_URL_PREFIX)) {
if (!imageCaches[imageUrl]) {
const reader = new FileReader();
return fetch(imageUrl, {
method: "GET",
mode: "cors",
credentials: "include",
})
.then((res) => res.blob())
.then(
async (blob) =>
(imageCaches[imageUrl] = await compressImage(blob, 256 * 1024)),
); // compressImage
}
return Promise.resolve(imageCaches[imageUrl]);
}
return Promise.resolve(imageUrl);
}

export function base64Image2Blob(base64Data: string, contentType: string) {
const byteCharacters = atob(base64Data);
const byteNumbers = new Array(byteCharacters.length);
for (let i = 0; i < byteCharacters.length; i++) {
byteNumbers[i] = byteCharacters.charCodeAt(i);
}
const byteArray = new Uint8Array(byteNumbers);
return new Blob([byteArray], { type: contentType });
}

export function uploadImage(file: File): Promise<string> {
if (!window._SW_ENABLED) {
// if serviceWorker register error, using compressImage
return compressImage(file, 256 * 1024);
}
const body = new FormData();
body.append("file", file);
return fetch(UPLOAD_URL, {
method: "post",
body,
mode: "cors",
credentials: "include",
})
.then((res) => res.json())
.then((res) => {
console.log("res", res);
if (res?.code == 0 && res?.data) {
return res?.data;
}
throw Error(`upload Error: ${res?.msg}`);
});
}

export function removeImage(imageUrl: string) {
return fetch(imageUrl, {
method: "DELETE",
mode: "cors",
credentials: "include",
});
}
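
Putting the new helpers together, an end-to-end usage sketch (illustrative only; the multimodal part shape follows MultimodalContent from app/client/api, and the prompt text is made up):

import type { MultimodalContent } from "@/app/client/api";
import {
  uploadImage,
  preProcessImageContent,
  removeImage,
} from "@/app/utils/chat";

async function attachAndPrepare(file: File) {
  // 1. Upload: with the service worker active this resolves to a short cache URL
  //    such as "/api/cache/<id>.png"; if registration failed, it falls back to the
  //    base64 data URL produced by compressImage.
  const url = await uploadImage(file);

  // 2. Chat history only needs the small URL inside a multimodal part.
  const content: MultimodalContent[] = [
    { type: "text", text: "What is in this image?" },
    { type: "image_url", image_url: { url } },
  ];

  // 3. Just before a request, the provider clients swap the cache URL back to base64.
  const prepared = await preProcessImageContent(content);

  // 4. Once the attachment is no longer needed, the cached file can be deleted
  //    (meaningful when url points at /api/cache/..., not the data URL fallback).
  await removeImage(url);

  return prepared;
}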
46 changes: 45 additions & 1 deletion public/serviceWorker.js
@@ -1,15 +1,59 @@
const CHATGPT_NEXT_WEB_CACHE = "chatgpt-next-web-cache";
const CHATGPT_NEXT_WEB_FILE_CACHE = "chatgpt-next-web-file";
let a="useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict";let nanoid=(e=21)=>{let t="",r=crypto.getRandomValues(new Uint8Array(e));for(let n=0;n<e;n++)t+=a[63&r[n]];return t};

self.addEventListener("activate", function (event) {
console.log("ServiceWorker activated.");
});

self.addEventListener("install", function (event) {
self.skipWaiting(); // enable new version
event.waitUntil(
caches.open(CHATGPT_NEXT_WEB_CACHE).then(function (cache) {
return cache.addAll([]);
}),
);
});

self.addEventListener("fetch", (e) => {});
async function upload(request, url) {
const formData = await request.formData()
const file = formData.getAll('file')[0]
let ext = file.name.split('.').pop()
if (ext === 'blob') {
ext = file.type.split('/').pop()
}
const fileUrl = `${url.origin}/api/cache/${nanoid()}.${ext}`
// console.debug('file', file, fileUrl, request)
const cache = await caches.open(CHATGPT_NEXT_WEB_FILE_CACHE)
await cache.put(new Request(fileUrl), new Response(file, {
headers: {
'content-type': file.type,
'content-length': file.size,
'cache-control': 'no-cache', // file already store in disk
'server': 'ServiceWorker',
}
}))
return Response.json({ code: 0, data: fileUrl })
}

async function remove(request, url) {
const cache = await caches.open(CHATGPT_NEXT_WEB_FILE_CACHE)
const res = await cache.delete(request.url)
return Response.json({ code: 0 })
}

self.addEventListener("fetch", (e) => {
const url = new URL(e.request.url);
if (/^\/api\/cache/.test(url.pathname)) {
if ('GET' == e.request.method) {
e.respondWith(caches.match(e.request))
}
if ('POST' == e.request.method) {
e.respondWith(upload(e.request, url))
}
if ('DELETE' == e.request.method) {
e.respondWith(remove(e.request, url))
}
}
});
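
Uploaded files end up in Cache Storage under the CHATGPT_NEXT_WEB_FILE_CACHE name, so the stored entries can be inspected directly. A minimal check to run in the page console once the worker is active (output shape is illustrative):

const cache = await caches.open("chatgpt-next-web-file");
const keys = await cache.keys();
console.log(keys.map((req) => req.url)); // e.g. ["https://<origin>/api/cache/<id>.png"]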

22 changes: 20 additions & 2 deletions public/serviceWorkerRegister.js
@@ -1,9 +1,27 @@
if ('serviceWorker' in navigator) {
window.addEventListener('load', function () {
window.addEventListener('DOMContentLoaded', function () {
navigator.serviceWorker.register('/serviceWorker.js').then(function (registration) {
console.log('ServiceWorker registration successful with scope: ', registration.scope);
const sw = registration.installing || registration.waiting
if (sw) {
sw.onstatechange = function() {
if (sw.state === 'installed') {
// SW installed. Reload for SW intercept serving SW-enabled page.
console.log('ServiceWorker installed reload page');
window.location.reload();
}
}
}
registration.update().then(res => {
console.log('ServiceWorker registration update: ', res);
});
window._SW_ENABLED = true
}, function (err) {
console.error('ServiceWorker registration failed: ', err);
});
navigator.serviceWorker.addEventListener('controllerchange', function() {
console.log('ServiceWorker controllerchange ');
window.location.reload(true);
});
});
}
}