feat: support real stop response (Close #157)
fix: when using an access token, refreshing the page after the first chat made it impossible to continue the conversation
Kerwin committed May 27, 2023
1 parent b826368 commit 9f2f5c3
Showing 7 changed files with 64 additions and 8 deletions.
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "chatgpt-web",
"version": "2.13.0",
"version": "2.13.3",
"private": false,
"description": "ChatGPT Web",
"author": "ChenZhaoYu <[email protected]>",
23 changes: 20 additions & 3 deletions service/src/chatgpt/index.ts
@@ -81,10 +81,12 @@ export async function initApi(key: KeyConfig, chatModel: CHATMODEL) {
return new ChatGPTUnofficialProxyAPI({ ...options })
}
}

const processThreads: { userId: string; abort: AbortController; messageId: string }[] = []
async function chatReplyProcess(options: RequestOptions) {
const model = options.chatModel
const key = options.key
const userId = options.userId
const messageId = options.messageId
if (key == null || key === undefined)
throw new Error('没有可用的配置。请再试一次 | No available configuration. Please try again.')

@@ -107,6 +109,10 @@ async function chatReplyProcess(options: RequestOptions) {
options = { ...lastContext }
}
const api = await initApi(key, model)

const abort = new AbortController()
options.abortSignal = abort.signal
processThreads.push({ userId, abort, messageId })
const response = await api.sendMessage(message, {
...options,
onProgress: (partialResponse) => {
@@ -125,9 +131,22 @@
}
finally {
releaseApiKey(key)
const index = processThreads.findIndex(d => d.userId === userId)
if (index > -1)
processThreads.splice(index, 1)
}
}

export function abortChatProcess(userId: string) {
const index = processThreads.findIndex(d => d.userId === userId)
if (index <= -1)
return
const messageId = processThreads[index].messageId
processThreads[index].abort.abort()
processThreads.splice(index, 1)
return messageId
}

export function initAuditService(audit: AuditConfig) {
if (!audit || !audit.options || !audit.options.apiKey || !audit.options.apiSecret)
return
@@ -328,10 +347,8 @@ async function randomKeyConfig(keys: KeyConfig[]): Promise<KeyConfig | null> {
const thisLockedKey = _lockedKeys.filter(d => d.key === thisKey.key)
if (thisLockedKey.length <= 0)
_lockedKeys.push({ key: thisKey.key, count: 1 })

else
thisLockedKey[0].count++

return thisKey
}

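The server-side half of the stop feature lives in this file: each in-flight completion is registered in processThreads together with an AbortController keyed by userId, the controller's signal is passed to api.sendMessage, and abortChatProcess(userId) aborts it and returns the messageId so the caller can persist whatever text was generated so far. A minimal sketch of the same registry pattern in isolation (startJob and stopJob are illustrative names, not part of this commit):

const jobs: { userId: string; abort: AbortController }[] = []

// Run a cancellable job for a user; the runner is expected to honour the abort signal.
async function startJob(userId: string, run: (signal: AbortSignal) => Promise<string>) {
  const abort = new AbortController()
  jobs.push({ userId, abort })
  try {
    return await run(abort.signal)
  }
  finally {
    // Always unregister, whether the job finished, failed or was aborted.
    const index = jobs.findIndex(job => job.userId === userId)
    if (index > -1)
      jobs.splice(index, 1)
  }
}

// Cancel the user's in-flight job, if any.
function stopJob(userId: string) {
  const index = jobs.findIndex(job => job.userId === userId)
  if (index > -1)
    jobs[index].abort.abort()
}

Keying the registry on userId alone means each user has at most one tracked generation at a time, which is consistent with the chat UI only sending one request while loading.
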
2 changes: 2 additions & 0 deletions service/src/chatgpt/types.ts
@@ -10,6 +10,8 @@ export interface RequestOptions {
top_p?: number
chatModel: CHATMODEL
key: KeyConfig
userId: string
messageId: string
}

export interface BalanceResponse {
23 changes: 22 additions & 1 deletion service/src/index.ts
@@ -4,7 +4,7 @@ import * as dotenv from 'dotenv'
import { ObjectId } from 'mongodb'
import type { RequestProps } from './types'
import type { ChatContext, ChatMessage } from './chatgpt'
import { chatConfig, chatReplyProcess, containsSensitiveWords, getRandomApiKey, initAuditService } from './chatgpt'
import { abortChatProcess, chatConfig, chatReplyProcess, containsSensitiveWords, getRandomApiKey, initAuditService } from './chatgpt'
import { auth, getUserId } from './middleware/auth'
import { clearApiKeyCache, clearConfigCache, getApiKeys, getCacheApiKeys, getCacheConfig, getOriginConfig } from './storage/config'
import type { AuditConfig, CHATMODEL, ChatInfo, ChatOptions, Config, KeyConfig, MailConfig, SiteConfig, UsageResponse, UserInfo } from './storage/model'
@@ -431,6 +431,8 @@ router.post('/chat-process', [auth, limiter], async (req, res) => {
top_p,
chatModel: user.config.chatModel,
key: await getRandomApiKey(user, user.config.chatModel),
userId,
messageId: message._id.toString(),
})
// return the whole response including usage
res.write(`\n${JSON.stringify(result.data)}`)
@@ -457,13 +459,15 @@ router.post('/chat-process', [auth, limiter], async (req, res) => {
await updateChat(message._id as unknown as string,
result.data.text,
result.data.id,
result.data.conversationId,
result.data.detail?.usage as UsageResponse,
previousResponse as [])
}
else {
await updateChat(message._id as unknown as string,
result.data.text,
result.data.id,
result.data.conversationId,
result.data.detail?.usage as UsageResponse)
}

@@ -481,6 +485,23 @@ router.post('/chat-process', [auth, limiter], async (req, res) => {
}
})

router.post('/chat-abort', [auth, limiter], async (req, res) => {
try {
const userId = req.headers.userId.toString()
const { text, messageId, conversationId } = req.body as { text: string; messageId: string; conversationId: string }
const msgId = await abortChatProcess(userId)
await updateChat(msgId,
text,
messageId,
conversationId,
null)
res.send({ status: 'Success', message: 'OK', data: null })
}
catch (error) {
res.send({ status: 'Fail', message: '停止回复失败 | Failed to stop responding', data: null })
}
})

router.post('/user-register', async (req, res) => {
try {
const { username, password } = req.body as { username: string; password: string }
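With the /chat-abort route added above, stopping a reply takes two steps on the client: abort the local streaming fetch, then notify the server so it can abort the upstream request and persist the partial answer via updateChat. A hedged sketch of the request the route expects (the base path and the Authorization header are assumptions; the real client goes through the shared post helper in src/api/index.ts):

async function stopResponding(token: string, partialText: string, lastResponseId: string, conversationId: string) {
  // '/api/chat-abort' is an assumed base path; adjust to the actual deployment prefix.
  await fetch('/api/chat-abort', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${token}`, // assumed shape of the token checked by the auth middleware
    },
    body: JSON.stringify({
      text: partialText,         // whatever has been streamed so far
      messageId: lastResponseId, // id of the partially generated assistant message
      conversationId,            // conversation the message belongs to
    }),
  })
}
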
3 changes: 2 additions & 1 deletion service/src/storage/mongo.ts
@@ -39,12 +39,13 @@ export async function getChatByMessageId(messageId: string) {
return await chatCol.findOne({ 'options.messageId': messageId }) as ChatInfo
}

export async function updateChat(chatId: string, response: string, messageId: string, usage: UsageResponse, previousResponse?: []) {
export async function updateChat(chatId: string, response: string, messageId: string, conversationId: string, usage: UsageResponse, previousResponse?: []) {
const query = { _id: new ObjectId(chatId) }
const update = {
$set: {
'response': response,
'options.messageId': messageId,
'options.conversationId': conversationId,
'options.prompt_tokens': usage?.prompt_tokens,
'options.completion_tokens': usage?.completion_tokens,
'options.total_tokens': usage?.total_tokens,
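Because conversationId is inserted before usage, this is a breaking change for every caller of updateChat; the call sites in service/src/index.ts are updated in the same commit. A call now looks roughly like this (the result shape mirrors the chat-process handler above):

await updateChat(
  message._id.toString(),                     // chat document to update
  result.data.text,                           // full or partial answer text
  result.data.id,                             // provider message id
  result.data.conversationId,                 // conversation id now persisted alongside the message id
  result.data.detail?.usage as UsageResponse, // token usage, may be undefined
)
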
7 changes: 7 additions & 0 deletions src/api/index.ts
@@ -59,6 +59,13 @@ export function fetchChatAPIProcess<T = any>(
})
}

export function fetchChatStopResponding<T = any>(text: string, messageId: string, conversationId: string) {
return post<T>({
url: '/chat-abort',
data: { text, messageId, conversationId },
})
}

export function fetchChatResponseoHistory<T = any>(roomId: number, uuid: number, index: number) {
return get<T>({
url: '/chat-response-history',
12 changes: 10 additions & 2 deletions src/views/chat/index.vue
@@ -13,7 +13,7 @@ import HeaderComponent from './components/Header/index.vue'
import { HoverButton, SvgIcon } from '@/components/common'
import { useBasicLayout } from '@/hooks/useBasicLayout'
import { useAuthStore, useChatStore, usePromptStore, useUserStore } from '@/store'
import { fetchChatAPIProcess, fetchChatResponseoHistory, fetchUpdateUserChatModel } from '@/api'
import { fetchChatAPIProcess, fetchChatResponseoHistory, fetchChatStopResponding, fetchUpdateUserChatModel } from '@/api'
import { t } from '@/locales'
import { debounce } from '@/utils/functions/debounce'
import IconPrompt from '@/icons/Prompt.vue'
@@ -22,6 +22,7 @@ import type { CHATMODEL } from '@/components/common/Setting/model'
const Prompt = defineAsyncComponent(() => import('@/components/common/Setting/Prompt.vue'))
let controller = new AbortController()
let lastChatInfo: any = {}
const openLongReply = import.meta.env.VITE_GLOB_OPEN_LONG_REPLY === 'true'
@@ -138,6 +139,7 @@ async function onConversation() {
chunk = responseText.substring(lastIndex)
try {
const data = JSON.parse(chunk)
lastChatInfo = data
const usage = (data.detail && data.detail.usage)
? {
completion_tokens: data.detail.usage.completion_tokens || null,
@@ -284,6 +286,7 @@ async function onRegenerate(index: number) {
chunk = responseText.substring(lastIndex)
try {
const data = JSON.parse(chunk)
lastChatInfo = data
const usage = (data.detail && data.detail.usage)
? {
completion_tokens: data.detail.usage.completion_tokens || null,
@@ -464,10 +467,11 @@ function handleEnter(event: KeyboardEvent) {
}
}
function handleStop() {
async function handleStop() {
if (loading.value) {
controller.abort()
loading.value = false
await fetchChatStopResponding(lastChatInfo.text, lastChatInfo.id, lastChatInfo.conversationId)
}
}
@@ -581,6 +585,10 @@ async function handleSyncChatModel(chatModel: CHATMODEL) {
onMounted(() => {
firstLoading.value = true
handleSyncChat()
const chatModels = authStore.session?.chatModels
if (chatModels != null && chatModels.filter(d => d.value === userStore.userInfo.config.chatModel).length <= 0)
ms.error('你选择的模型已不存在,请重新选择 | The selected model no longer exists, please choose again.', { duration: 7000 })
})
watch(() => chatStore.active, (newVal, oldVal) => {
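Putting the client-side pieces together: every parsed streaming chunk is cached in lastChatInfo, and handleStop first aborts the local fetch controller, then calls fetchChatStopResponding so the server can abort its upstream request and save the partial reply. A condensed sketch of that flow (rememberChunk is an illustrative name; this is not the literal component code):

import { ref } from 'vue'
import { fetchChatStopResponding } from '@/api'

const loading = ref(false)
let controller = new AbortController()
let lastChatInfo: any = {}

// Called from the streaming progress handler with each parsed chunk.
function rememberChunk(data: any) {
  lastChatInfo = data
}

async function handleStop() {
  if (!loading.value)
    return
  controller.abort()    // stop the client-side stream immediately
  loading.value = false
  // Report the partial answer so the server aborts upstream and persists it.
  await fetchChatStopResponding(lastChatInfo.text, lastChatInfo.id, lastChatInfo.conversationId)
}
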
