Few bug fixes in AI
LunaUrsa committed Feb 19, 2024
1 parent b69338c commit 6e3d930
Showing 3 changed files with 73 additions and 12 deletions.
24 changes: 24 additions & 0 deletions .vscode/settings.json
@@ -0,0 +1,24 @@
+{
+  "eslint.codeActionsOnSave.rules": null,
+  "editor.codeActionsOnSave": {
+    "source.fixAll.eslint": "explicit"
+  },
+  "eslint.validate": [
+    "javascript",
+    "typescript",
+  ],
+  "editor.tabSize": 2,
+  "cSpell.words": [
+    "AILOG",
+    "Blurple",
+    "bottest",
+    "logit",
+    "MDMA",
+    "modlog",
+    "moodle",
+    "Moonbear",
+    "openai",
+    "tripbot",
+    "WOLFDEN"
+  ],
+}
9 changes: 7 additions & 2 deletions src/discord/commands/global/d.ai.ts
@@ -2281,12 +2281,12 @@ export async function aiReaction(
|| thumbsDownEmojis.includes(messageReaction.emoji.name as string)
)
) {
-// log.debug(F, `Someone reacted to tripbot's message with an audit emoji (${messageReaction.emoji.name})`);
+log.debug(F, `Someone reacted to tripbot's message with an audit emoji (${messageReaction.emoji.name})`);

const channelAiVoteLog = await discordClient.channels.fetch(env.CHANNEL_AIVOTELOG) as TextChannel;
const action = thumbsUpEmojis.includes(messageReaction.emoji.name as string) ? 'approve' : 'reject';

-const auditLimit = env.NODE_ENV === 'production' ? 4 : 2;
+const auditLimit = env.NODE_ENV === 'production' ? 4 : 3;
// log.debug(F, `Audit limit is ${auditLimit}, emoji count is ${messageReaction.count}`);
if (messageReaction.count === auditLimit) {
// log.debug(F, `Audit limit reached (${auditLimit})`);
@@ -2318,6 +2318,10 @@ export async function aiReaction(
},
});

+log.debug(F, `personaData: ${JSON.stringify(personaData, null, 2)}`);
+
+log.debug(F, `Updating db.ai_personas with ${action} vote`);
+
await db.ai_personas.update({
where: {
id: personaData.id,
@@ -2335,6 +2339,7 @@ export async function aiReaction(
}),
});

+log.debug(F, 'Sending message to vote room');
await channelAiVoteLog.send({
embeds: [embedTemplate()
.setTitle(`AI ${action}`)
52 changes: 42 additions & 10 deletions src/global/commands/g.ai.ts
@@ -267,7 +267,7 @@ async function googleAiConversation(
promptTokens: number,
completionTokens: number,
}> {
-log.debug(F, `googleAiConversation | aiPersona: ${JSON.stringify(aiPersona, null, 2)}`);
+// log.debug(F, `googleAiConversation | aiPersona: ${JSON.stringify(aiPersona, null, 2)}`);
// const response = '';
const promptTokens = 0;
const completionTokens = 0;
@@ -347,7 +347,12 @@ async function googleAiConversation(
generationConfig,
safetySettings,
});
-return { response: result.response.text(), promptTokens, completionTokens };
+try {
+  return { response: result.response.text(), promptTokens, completionTokens };
+} catch (error) {
+  log.error(F, `Error sending message: ${error}`);
+  return { response: (error as Error).message, promptTokens, completionTokens };
+}
} catch (error) {
log.error(F, `Error fetching data: ${error}`);
}
@@ -385,14 +390,14 @@ async function googleAiConversation(
},
];
}
-log.debug(F, `userHistory: ${JSON.stringify(userHistory, null, 2)}`);
+// log.debug(F, `userHistory: ${JSON.stringify(userHistory, null, 2)}`);

const chat = model.startChat({
history: userHistory,
generationConfig,
safetySettings,
});
-log.debug(F, `chat: ${JSON.stringify(chat, null, 2)}`);
+// log.debug(F, `chat: ${JSON.stringify(chat, null, 2)}`);

let result = {} as GenerateContentResult;
try {
@@ -402,7 +407,7 @@
return { response: (error as Error).message, promptTokens, completionTokens };
}

-log.debug(F, `result: ${JSON.stringify(result, null, 2)}`);
+// log.debug(F, `result: ${JSON.stringify(result, null, 2)}`);

// Update the history with the message and this response
userHistory.push({
@@ -413,7 +418,7 @@
role: 'model',
parts: result.response.text(),
});
-log.debug(F, `newUserHistory: ${JSON.stringify(userHistory, null, 2)}`);
+// log.debug(F, `newUserHistory: ${JSON.stringify(userHistory, null, 2)}`);

// Save the user's history
await db.users.update({
@@ -589,10 +594,37 @@ async function openAiConversation(
}

// Add the message to the thread
-const message = await openAi.beta.threads.messages.create(
-  thread.id,
-  messages[0],
-);
+
+try {
+  const message = await openAi.beta.threads.messages.create(
+    thread.id,
+    messages[0],
+  );
+} catch (error) {
+  log.error(F, `Error sending message: ${error}`);
+  console.log(error);
+
+  // Get all the runs
+  const runs = await openAi.beta.threads.runs.list(thread.id, {
+    limit: 1,
+  });
+
+  // The most recent run is the first one in the sorted array
+  const recentRun = runs.data[0];
+
+  // If the most recent run is in progress, stop it
+  if (recentRun.status === 'in_progress') {
+    log.debug(F, 'Stopping the run');
+    await openAi.beta.threads.runs.cancel(thread.id, recentRun.id);
+  }
+
+  // Add the message to the thread
+  const message = await openAi.beta.threads.messages.create(
+    thread.id,
+    messages[0],
+  );
+}
+
// log.debug(F, `message: ${JSON.stringify(message, null, 2)}`);

log.debug(F, `Starting new run with assistant: ${assistant.id} and thread: ${thread.id}`);

0 comments on commit 6e3d930
