From 1a588cc9ccd9f3e4c720b734af3356255b04ac6d Mon Sep 17 00:00:00 2001
From: theimperious1
Date: Sun, 10 Nov 2024 01:14:46 -0500
Subject: [PATCH] Fix formatting, other lint complaints. Also resolved warnings in d.admin from console logs.

---
 src/discord/commands/global/d.ai.ts   |   2 +-
 src/discord/commands/guild/d.admin.ts |  12 +-
 src/global/commands/g.ai.ts           | 174 ++++++++++++++------
 3 files changed, 103 insertions(+), 85 deletions(-)

diff --git a/src/discord/commands/global/d.ai.ts b/src/discord/commands/global/d.ai.ts
index 8cf5066b..83d11d69 100644
--- a/src/discord/commands/global/d.ai.ts
+++ b/src/discord/commands/global/d.ai.ts
@@ -48,7 +48,7 @@ import {
 import { SlashCommand } from '../../@types/commandDef';
 import { embedTemplate } from '../../utils/embedTemplate';
 import commandContext from '../../utils/context';
-import aiChat, { aiModerate, handleAiMessageQueue } from '../../../global/commands/g.ai';
+import { aiModerate, handleAiMessageQueue } from '../../../global/commands/g.ai';
 
 /* TODO
  * only direct @ message should trigger a response
diff --git a/src/discord/commands/guild/d.admin.ts b/src/discord/commands/guild/d.admin.ts
index 7f93e2f6..95741e51 100644
--- a/src/discord/commands/guild/d.admin.ts
+++ b/src/discord/commands/guild/d.admin.ts
@@ -83,11 +83,11 @@ async function setAvatar(interaction: ChatInputCommandInteraction, avatarUrl: st
       'Content-Type': 'application/json',
     },
   }).then(() => {
-    console.log('Avatar set successfully');
+    log.info(F, 'Avatar set successfully');
     interaction.editReply('Avatar set successfully');
     return true;
   }).catch((error: Error) => {
-    console.error(`Error setting avatar: ${error.message}`);
+    log.error(F, `Error setting avatar: ${error.message}`);
     interaction.editReply('Error setting avatar');
     return false;
   });
@@ -107,11 +107,11 @@ async function setBanner(interaction: ChatInputCommandInteraction, bannerUrl: st
       'Content-Type': 'application/json',
     },
   }).then(() => {
-    console.log('Banner set successfully');
+    log.info(F, 'Banner set successfully');
     interaction.editReply('Banner set successfully');
     return true;
   }).catch((error: Error) => {
-    console.error(`Error setting banner: ${error.message}`);
+    log.error(F, `Error setting banner: ${error.message}`);
     interaction.editReply('Error setting banner');
     return false;
   });
@@ -206,9 +206,9 @@ async function overwriteUserData(
         total_points: levelPoints,
       },
     });
-    console.log(`Update result: ${JSON.stringify(result)}`);
+    log.info(F, `Update result: ${JSON.stringify(result)}`);
   } catch (error) {
-    console.error(`Error updating database: ${(error as Error).message}`);
+    log.error(F, `Error updating database: ${(error as Error).message}`);
   }
 
   await interaction.editReply(`User level and points updated for category ${category} to level ${level} with ${levelPoints} points.`);
diff --git a/src/global/commands/g.ai.ts b/src/global/commands/g.ai.ts
index 3b7c7da0..591cfcfd 100644
--- a/src/global/commands/g.ai.ts
+++ b/src/global/commands/g.ai.ts
@@ -27,18 +27,18 @@ const googleAi = new GoogleGenerativeAI(env.GEMINI_KEY);
 
 type UserQueue = {
   queue: {
-        aiPersona: ai_personas;
-        messages: { role: 'user'; content: string }[];
-        messageData: Message;
-        attachmentInfo: {
-          url: string | null;
-          mimeType: string | null;
-        };
-        resolve: (value: {
-          response: string;
-          promptTokens: number;
-          completionTokens: number;
-        }) => void;
+    aiPersona: ai_personas;
+    messages: { role: 'user'; content: string }[];
+    messageData: Message;
+    attachmentInfo: {
+      url: string | null;
+      mimeType: string | null;
+    };
+    resolve: (value: {
+      response: string;
+      promptTokens: number;
+      completionTokens: number;
+    }) => void;
   }[];
   isProcessing: boolean;
 };
@@ -122,72 +122,6 @@ const aiFunctions = [
   },
 ];
 
-// Main function for aiChat to handle incoming messages and return a Promise with response data
-export function handleAiMessageQueue(
-  aiPersona: ai_personas,
-  messages: { role: 'user'; content: string }[],
-  messageData: Message,
-  attachmentInfo: { url: string | null; mimeType: string | null }
-): Promise<{
-  response: string;
-  promptTokens: number;
-  completionTokens: number;
-}> {
-  if (!userQueues.has(messageData.author.id)) {
-    userQueues.set(messageData.author.id, { queue: [], isProcessing: false });
-  }
-
-  const userQueue = userQueues.get(messageData.author.id)!;
-
-  // Push the new message data into the user's queue
-  return new Promise((resolve) => {
-    userQueue.queue.push({
-      aiPersona,
-      messages,
-      messageData,
-      attachmentInfo,
-      resolve
-    });
-
-    // If the user is not currently processing, start processing
-    if (!userQueue.isProcessing) {
-      processNextMessage(messageData.author.id);
-    }
-  });
-}
-
-// Function to process the next message in the user's queue
-async function processNextMessage(userId: string) {
-  const userQueue = userQueues.get(userId);
-  if (!userQueue || userQueue.queue.length === 0) {
-    userQueue!.isProcessing = false;
-    return;
-  }
-
-  userQueue.isProcessing = true; // Mark as processing
-
-  // Get the next message in the queue
-  const { aiPersona, messages, messageData, attachmentInfo, resolve } = userQueue.queue.shift()!;
-
-  try {
-    // Call the aiChat function and destructure the needed response data
-    const { response, promptTokens, completionTokens } = await aiChat(
-      aiPersona,
-      messages,
-      messageData,
-      attachmentInfo
-    );
-
-    resolve({ response, promptTokens, completionTokens });
-  } catch (error) {
-    log.error(F, `Error processing message for user: ${userId} - error: ${error}`);
-    resolve({ response: "Error", promptTokens: 0, completionTokens: 0 });
-  } finally {
-    // Process the next message after this one is done
-    processNextMessage(userId);
-  }
-}
-
 export async function aiModerateReport(
   message: string,
 ):Promise {
@@ -974,6 +908,90 @@ export default async function aiChat(
   return openAiConversation(aiPersona, messages, messageData);
 }
 
+// Function to process the next message in the user's queue
+async function processNextMessage(userId: string) {
+  const userQueue = userQueues.get(userId);
+
+  // If userQueue is null or undefined, exit the function immediately
+  if (!userQueue) return;
+
+  // If the queue is empty, reset isProcessing to false and exit
+  if (userQueue.queue.length === 0) {
+    userQueue.isProcessing = false;
+    return;
+  }
+
+  userQueue.isProcessing = true; // Mark as processing
+
+  // Ensure the queue has an item before destructuring
+  const nextMessage = userQueue.queue.shift();
+  if (!nextMessage) {
+    // Handle case where there’s no next message in the queue, if needed
+    return;
+  }
+
+  const {
+    aiPersona, messages, messageData, attachmentInfo, resolve,
+  } = nextMessage;
+
+  try {
+    // Call the aiChat function and destructure the needed response data
+    const { response, promptTokens, completionTokens } = await aiChat(
+      aiPersona,
+      messages,
+      messageData,
+      attachmentInfo,
+    );
+
+    resolve({ response, promptTokens, completionTokens });
+  } catch (error) {
+    log.error(F, `Error processing message for user: ${userId} - error: ${error}`);
+    resolve({ response: 'Error', promptTokens: 0, completionTokens: 0 });
+  } finally {
+    // Process the next message after this one is done
+    processNextMessage(userId);
+  }
+}
+
+// Main function for aiChat to handle incoming messages and return a Promise with response data
+export function handleAiMessageQueue(
+  aiPersona: ai_personas,
+  messages: { role: 'user'; content: string }[],
+  messageData: Message,
+  attachmentInfo: { url: string | null; mimeType: string | null },
+): Promise<{
+  response: string;
+  promptTokens: number;
+  completionTokens: number;
+  }> {
+  if (!userQueues.has(messageData.author.id)) {
+    userQueues.set(messageData.author.id, { queue: [], isProcessing: false });
+  }
+
+  const userQueue = userQueues.get(messageData.author.id);
+
+  if (!userQueue) {
+    // Return a rejected promise if userQueue is undefined
+    return Promise.reject(new Error(`User queue could not be initialized for user ${messageData.author.id}`));
+  }
+
+  // Push the new message data into the user's queue
+  return new Promise(resolve => {
+    userQueue.queue.push({
+      aiPersona,
+      messages,
+      messageData,
+      attachmentInfo,
+      resolve,
+    });
+
+    // If the user is not currently processing, start processing
+    if (!userQueue.isProcessing) {
+      processNextMessage(messageData.author.id);
+    }
+  });
+}
+
 export async function aiFlairMod(
   aiPersona:ai_personas,
   messages: OpenAI.Chat.ChatCompletionMessageParam [],