Merge pull request ChatGPTNextWeb#5819 from ConnectAI-E/fix-gemini-summary

Fix gemini summary
lloydzhou authored Nov 13, 2024
2 parents a392daa + b41c012 commit b08ce56
Showing 1 changed file with 9 additions and 3 deletions.
12 changes: 9 additions & 3 deletions app/client/platforms/google.ts
@@ -29,7 +29,7 @@ import { RequestPayload } from "./openai";
 import { fetch } from "@/app/utils/stream";
 
 export class GeminiProApi implements LLMApi {
-  path(path: string): string {
+  path(path: string, shouldStream = false): string {
     const accessStore = useAccessStore.getState();
 
     let baseUrl = "";
@@ -51,15 +51,18 @@ export class GeminiProApi implements LLMApi {
     console.log("[Proxy Endpoint] ", baseUrl, path);
 
     let chatPath = [baseUrl, path].join("/");
+    if (shouldStream) {
+      chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
+    }
 
-    chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
     return chatPath;
   }
   extractMessage(res: any) {
     console.log("[Response] gemini-pro response: ", res);
 
     return (
       res?.candidates?.at(0)?.content?.parts.at(0)?.text ||
+      res?.at(0)?.candidates?.at(0)?.content?.parts.at(0)?.text ||
       res?.error?.message ||
       ""
     );
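Note: the extractMessage change keeps the original single-object lookup and adds a fallback for responses that parse to an array of chunks. Below is a self-contained sketch of that dual handling; the response objects are hypothetical and only illustrate the two shapes.

```ts
// Sketch of the patched extraction logic: try the single-object shape first,
// then fall back to the first element of an array-of-chunks response.
function extractText(res: any): string {
  return (
    res?.candidates?.at(0)?.content?.parts.at(0)?.text ||
    res?.at(0)?.candidates?.at(0)?.content?.parts.at(0)?.text ||
    res?.error?.message ||
    ""
  );
}

const singleObject = { candidates: [{ content: { parts: [{ text: "hello" }] } }] };
const arrayOfChunks = [{ candidates: [{ content: { parts: [{ text: "hello" }] } }] }];

console.log(extractText(singleObject)); // "hello"
console.log(extractText(arrayOfChunks)); // "hello"
```

Both shapes resolve to the first candidate's first text part, and API errors still surface through res?.error?.message.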
@@ -166,7 +169,10 @@ export class GeminiProApi implements LLMApi {
     options.onController?.(controller);
     try {
       // https://github.com/google-gemini/cookbook/blob/main/quickstarts/rest/Streaming_REST.ipynb
-      const chatPath = this.path(Google.ChatPath(modelConfig.model));
+      const chatPath = this.path(
+        Google.ChatPath(modelConfig.model),
+        shouldStream,
+      );
 
       const chatPayload = {
         method: "POST",
