Skip to content

Commit

Permalink
AI chat: Added an option to prevent the bots from talking as the play…
Browse files Browse the repository at this point in the history
…er by only using the response up to the point the player starts talking
  • Loading branch information
mostlikely4r committed Nov 19, 2024
1 parent 233bb23 commit 10e3b1e
Show file tree
Hide file tree
Showing 5 changed files with 21 additions and 22 deletions.
3 changes: 3 additions & 0 deletions playerbot/PlayerbotAIConfig.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -609,6 +609,9 @@ bool PlayerbotAIConfig::Initialize()
std::replace(llmResponseStartPattern.begin(), llmResponseStartPattern.end(), '\'', '\"');
llmResponseEndPattern = config.GetStringDefault("AiPlayerbot.LLMResponseEndPattern", "\"");
std::replace(llmResponseEndPattern.begin(), llmResponseEndPattern.end(), '\'', '\"');

llmPreventTalkingForPlayer = config.GetBoolDefault("AiPlayerbot.LLMPreventTalkingForPlayer", false);

//LLM END

// Gear progression system
Expand Down
1 change: 1 addition & 0 deletions playerbot/PlayerbotAIConfig.h
Original file line number Diff line number Diff line change
Expand Up @@ -337,6 +337,7 @@ class PlayerbotAIConfig
//LM BEGIN
std::string llmApiEndpoint, llmApiKey, llmApiJson, llmPrePrompt, llmPrompt, llmPostPrompt, llmResponseStartPattern, llmResponseEndPattern;
uint32 llmContextLength;
bool llmPreventTalkingForPlayer;
ParsedUrl llmEndPointUrl;
//LM END

Expand Down
8 changes: 0 additions & 8 deletions playerbot/PlayerbotLLMInterface.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,6 @@
#include "PlayerbotLLMInterface.h"
#include "PlayerbotAIConfig.h"


#include <iostream>
#include <string>
#include <sstream>

#include <iostream>
#include <string>
#include <sstream>
Expand All @@ -24,9 +19,6 @@
#include <netinet/in.h>
#include <cstring>
#endif



std::string PlayerbotLLMInterface::Generate(const std::string& prompt) {
const int bufferSize = 4096;
char buffer[bufferSize];
Expand Down
17 changes: 6 additions & 11 deletions playerbot/PlayerbotLLMInterface.h
Original file line number Diff line number Diff line change
@@ -1,13 +1,8 @@
namespace ai
class PlayerbotLLMInterface
{
class PlayerbotLLMInterface
{
public:
PlayerbotLLMInterface() {}
static std::string Generate(const std::string& prompt);
public:
PlayerbotLLMInterface() {}
static std::string Generate(const std::string& prompt);

static std::vector<std::string> ParseResponse(const std::string& response, std::string startPattern, std::string endPattern);
private:

};
}
static std::vector<std::string> ParseResponse(const std::string& response, std::string startPattern, std::string endPattern);
};
14 changes: 11 additions & 3 deletions playerbot/strategy/actions/SayAction.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -204,6 +204,7 @@ void ChatReplyAction::ChatReplyDo(Player* bot, uint32 type, uint32 guid1, uint32
placeholders["<player level>"] = std::to_string(player->GetLevel());
placeholders["<player class>"] = ai->GetChatHelper()->formatClass(player->getClass());
placeholders["<player race>"] = ai->GetChatHelper()->formatRace(player->getRace());

#ifdef MANGOSBOT_ZERO
placeholders["<expansion name>"] = "Vanilla";
#endif
Expand Down Expand Up @@ -279,7 +280,7 @@ void ChatReplyAction::ChatReplyDo(Player* bot, uint32 type, uint32 guid1, uint32

json = BOT_TEXT2(json, placeholders);

std::string playerName;
std::string playerName = player->GetName();

uint32 type = CHAT_MSG_WHISPER;

Expand All @@ -288,7 +289,6 @@ void ChatReplyAction::ChatReplyDo(Player* bot, uint32 type, uint32 guid1, uint32
case ChatChannelSource::SRC_WHISPER:
{
type = CHAT_MSG_WHISPER;
playerName = player->GetName();
break;
}
case ChatChannelSource::SRC_SAY:
Expand Down Expand Up @@ -321,10 +321,18 @@ void ChatReplyAction::ChatReplyDo(Player* bot, uint32 type, uint32 guid1, uint32
packet_template << type;
packet_template << lang;

if (!playerName.empty())
if (type == CHAT_MSG_WHISPER)
packet_template << playerName;

std::string response = PlayerbotLLMInterface::Generate(json);

if (sPlayerbotAIConfig.llmPreventTalkingForPlayer)
{
size_t pos = response.find(playerName + ":");
if (pos != std::string::npos)
response = response.substr(0, pos) + sPlayerbotAIConfig.llmResponseEndPattern;
}

std::vector<std::string> lines = PlayerbotLLMInterface::ParseResponse(response, sPlayerbotAIConfig.llmResponseStartPattern, sPlayerbotAIConfig.llmResponseEndPattern);

std::vector<WorldPacket> packets;
Expand Down

0 comments on commit 10e3b1e

Please sign in to comment.