Commit

Merge branch 'enh-1514/macos-wingman-qne' into dev/2.0.0
dvorka committed Feb 4, 2024
2 parents 9298f39 + 3409c1c commit affb05a
Showing 6 changed files with 136 additions and 47 deletions.
6 changes: 6 additions & 0 deletions app/app.pro
@@ -22,6 +22,7 @@ message("= MindForger QMake configuration ==========================")
message("Qt version: $$QT_VERSION")

QT += widgets
QT += network

mfdebug|mfunits {
DEFINES += DO_MF_DEBUG
@@ -88,6 +89,7 @@ win32 {
else:CONFIG(debug, debug|release): LIBS += -L$$PWD/../lib/debug -lmindforger
} else {
# Linux and macOS
# TODO split macOS
LIBS += -L$$OUT_PWD/../lib -lmindforger -lcurl
}

@@ -187,6 +189,9 @@ INCLUDEPATH += ./src/qt/spelling
#
win32{
QMAKE_CXXFLAGS += /MP
!mfnoccache {
QMAKE_CXX = ccache $$QMAKE_CXX
}
} else {
# linux and macos
mfnoccache {
@@ -501,5 +506,6 @@ win32 {
message(DEFINES of app.pro build: $$DEFINES)
message(QMAKE_EXTRA_TARGETS of app.pro build: $$QMAKE_EXTRA_TARGETS)
message(QT of app.pro build: $$QT)
message(PATH is: $$(PATH))

# eof
23 changes: 23 additions & 0 deletions app/qnetwork-get-test.html
@@ -0,0 +1,23 @@
{
"id": "chatcmpl-8oIy1BN3YeHaGgc3AIaYcEsOWHXbG",
"object": "chat.completion",
"created": 1707000001,
"model": "gpt-3.5-turbo-0613",
"choices": [
{
"index": 0,
"message": {
"role": "assistant",
"content": "Hello! How can I assist you today?"
},
"logprobs": null,
"finish_reason": "stop"
}
],
"usage": {
"prompt_tokens": 18,
"completion_tokens": 9,
"total_tokens": 27
},
"system_fingerprint": null
}
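
Note: this fixture mirrors the OpenAI chat-completion response that openai_wingman.cpp parses. Below is a hypothetical sketch (not from this commit) of reading the same fields with nlohmann::json; the fixture path and the include location are assumptions.

// Hypothetical test sketch - file path and json.hpp include location are assumptions.
#include <fstream>
#include <iostream>
#include <string>
#include <nlohmann/json.hpp>

int main() {
    std::ifstream in{"app/qnetwork-get-test.html"};
    nlohmann::json response = nlohmann::json::parse(in);

    // the same fields openai_wingman.cpp reads from a real OpenAI response
    std::string content = response["choices"][0]["message"]["content"];
    std::string finishReason = response["choices"][0]["finish_reason"];
    int totalTokens = response["usage"]["total_tokens"];

    std::cout << content << " (finish_reason=" << finishReason
              << ", total_tokens=" << totalTokens << ")" << std::endl;
    return 0;
}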
16 changes: 10 additions & 6 deletions lib/lib.pro
@@ -18,13 +18,14 @@
TARGET = mindforger
TEMPLATE = lib
CONFIG += staticlib
CONFIG -= qt

# Qt Network as CURL replacement on Win - add Qt to libmindforger!
win32 {
CONFIG += qt
QT += network
}
#win32|macx {
# Qt Network as CURL replacement on Win - add Qt to libmindforger!
CONFIG += qt
QT += network
#} else {
# CONFIG -= qt
#}

# Dependencies:
# - INCLUDEPATH is used during compilation to find included header files.
@@ -55,6 +56,9 @@ mfdebug|mfunits {
# compiler options (qmake CONFIG+=mfnoccache ...)
win32{
QMAKE_CXXFLAGS += /MP
!mfnoccache {
QMAKE_CXX = ccache $$QMAKE_CXX
}
} else {
# linux and macos
mfnoccache {
7 changes: 6 additions & 1 deletion lib/src/gear/string_utils.h
@@ -49,7 +49,7 @@ char** stringSplit(const char* s, const char delimiter);
char** stringSplit(const char* s, const char delimiter, u_int16_t resultBaseSize, u_int16_t resultIncSize);
std::vector<std::string> stringSplit(const std::string s, const std::string regexDelimiter);

#if defined(__APPLE__) || defined(_WIN32)
#if defined(_WIN32)
static inline std::string stringToUtf8(std::string& codepage_str)
{
int size = MultiByteToWideChar(
@@ -89,6 +89,11 @@ static inline std::string stringToUtf8(std::string& codepage_str)

return utf8_str;
}
#elif defined(__APPLE__)
static inline std::string stringToUtf8(std::string& codepage_str)
{
return codepage_str;
}
#endif

/**
129 changes: 90 additions & 39 deletions lib/src/mind/ai/llm/openai_wingman.cpp
@@ -74,7 +74,19 @@ void OpenAiWingman::curlGet(CommandWingmanChat& command) {
/*
OpenAI API JSon request example (see unit test):
...
{
"messages": [
{
"content": "You are a helpful assistant.",
"role": "system"
},
{
"content": "Hey hello! I'm MindForger user - how can you help me?",
"role": "user"
}
],
"model": "gpt-3.5-turbo"
}
*/
nlohmann::json messageSystemJSon{};
@@ -102,56 +114,67 @@ void OpenAiWingman::curlGet(CommandWingmanChat& command) {
<< "<<<"
<< endl);

#ifdef WIN32
#if defined(_WIN32) || defined(__APPLE__)
/* Qt Networking examples:
*
* - https://forum.qt.io/topic/116601/qnetworkaccessmanager-reply-is-always-empty/7
* - https://community.openai.com/t/qt-interface-w-chatgpt-api/354900
* - https://gist.github.com/FONQRI/d8fb13150c1e6760f1b1617730559418
*/

// request
QNetworkRequest request{};
QNetworkAccessManager networkManager;

// request: headers
request.setUrl(
QUrl("https://api.openai.com/v1/chat/completions"));
QNetworkRequest request(QUrl("https://api.openai.com/v1/chat/completions"));
request.setHeader(
QNetworkRequest::ContentTypeHeader,
QVariant("application/json"));
string apiKeyUtf8{stringToUtf8(apiKey)};
"application/json");
request.setRawHeader(
"Authorization",
("Bearer " + apiKeyUtf8).c_str());

// request body
string requestJSonStrUtf8{stringToUtf8(requestJSonStr)};
QByteArray requestBody(
requestJSonStrUtf8.c_str());
"Bearer " + QString::fromStdString(apiKey).toUtf8());

// create a network access manager
QNetworkAccessManager manager;

// request: POST
QNetworkReply* reply = manager.post(request, requestBody);

// connect to the finished signal to handle the response
QObject::connect(
reply, &QNetworkReply::finished,
[&]()
{
if (reply->error() == QNetworkReply::NoError) {
command.status = m8r::WingmanStatusCode::WINGMAN_STATUS_CODE_OK;
command.httpResponse = QString(reply->readAll()).toStdString();
} else {
QNetworkReply* reply = networkManager.post(
request,
requestJSonStr.c_str()
);
QEventLoop loop;
QObject::connect(reply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
loop.exec();
reply->deleteLater();

command.status = m8r::WingmanStatusCode::WINGMAN_STATUS_CODE_OK;

// response: error handling
auto error = reply->error();
if(error != QNetworkReply::NoError) {
command.errorMessage =
"Error: request to OpenAI Wingman provider failed due a network error - " +
reply->errorString().toStdString();
MF_DEBUG(command.errorMessage << endl);
command.status = m8r::WingmanStatusCode::WINGMAN_STATUS_CODE_ERROR;
}
QByteArray read;
if(command.status == m8r::WingmanStatusCode::WINGMAN_STATUS_CODE_OK) {
read = reply->readAll();

if(read.isEmpty()) {
command.errorMessage =
"Error: Request to OpenAI Wingman provider failed - response is empty'";
MF_DEBUG(command.errorMessage << endl);
command.status = m8r::WingmanStatusCode::WINGMAN_STATUS_CODE_ERROR;
command.errorMessage = QString(reply->readAll()).toStdString();
}
});
}

// delete the network reply when it's finished
QObject::connect(
reply, &QNetworkReply::finished,
reply, &QNetworkReply::deleteLater);
#else
// response: successful response processing
if(command.status == m8r::WingmanStatusCode::WINGMAN_STATUS_CODE_OK) {
QString qCommandResponse = QString{read};
command.httpResponse = qCommandResponse.toStdString();
command.errorMessage.clear();
command.status = m8r::WingmanStatusCode::WINGMAN_STATUS_CODE_OK;
MF_DEBUG(
"Successful OpenAI Wingman provider response:" << endl <<
" '" << command.httpResponse << "'" << endl);
}
#else
// set up cURL options
command.httpResponse.clear();
curl_easy_setopt(
@@ -189,7 +212,10 @@ void OpenAiWingman::curlGet(CommandWingmanChat& command) {

// finish error handling (shared by QNetwork/CURL)
if(command.status == WingmanStatusCode::WINGMAN_STATUS_CODE_ERROR) {
std::cerr << "Error: Wingman OpenAI cURL request failed: " << command.errorMessage << endl;
std::cerr <<
"Error: Wingman OpenAI cURL/QtNetwork request failed (error message/HTTP response):" << endl <<
" '" << command.errorMessage << "'" << endl <<
" '" << command.httpResponse << "'" << endl;

command.httpResponse.clear();
command.answerHtml.clear();
@@ -226,7 +252,26 @@ void OpenAiWingman::curlGet(CommandWingmanChat& command) {
"system_fingerprint": null
}
*/
auto httpResponseJSon = nlohmann::json::parse(command.httpResponse);

// parse response string to JSon object
nlohmann::json httpResponseJSon;
try {
httpResponseJSon = nlohmann::json::parse(command.httpResponse);
} catch (...) {
// catch ALL exceptions
MF_DEBUG(
"Error: unable to parse OpenAI JSon response:" << endl <<
"'" << command.httpResponse << "'" << endl
);

command.status = WingmanStatusCode::WINGMAN_STATUS_CODE_ERROR;
command.errorMessage = "Error: unable to parse OpenAI JSon response: '" + command.httpResponse + "'";
command.answerHtml.clear();
command.answerTokens = 0;
command.answerLlmModel = llmModel;

return;
}

MF_DEBUG(
"OpenAiWingman::curlGet() parsed response:" << endl
@@ -276,11 +321,17 @@ void OpenAiWingman::curlGet(CommandWingmanChat& command) {
command.errorMessage.assign(
"OpenAI API HTTP required failed with finish_reason: "
+ statusStr);
command.answerHtml.clear();
command.answerTokens = 0;
command.answerLlmModel = llmModel;
}
MF_DEBUG(" status: " << command.status << endl);
}
} else {
command.status = m8r::WingmanStatusCode::WINGMAN_STATUS_CODE_ERROR;
command.answerHtml.clear();
command.answerTokens = 0;
command.answerLlmModel = llmModel;
if(
httpResponseJSon.contains("error")
&& httpResponseJSon["error"].contains("message")
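
Note: the Win/macOS branch above replaces the earlier asynchronous reply lambda with a blocking QNetworkAccessManager + QEventLoop call, so the error handling and JSON parsing in the rest of curlGet() can stay synchronous, as in the cURL path. Below is a hypothetical standalone sketch of that pattern (not from this commit); the helper name is made up and it assumes a Q(Core)Application instance exists, as it does in the running MindForger app.

#include <QByteArray>
#include <QEventLoop>
#include <QNetworkAccessManager>
#include <QNetworkReply>
#include <QNetworkRequest>
#include <QObject>
#include <QUrl>

// Hypothetical helper: POST a JSON body and block until the reply arrives.
QByteArray blockingJsonPost(const QUrl& url, const QByteArray& apiKey, const QByteArray& body)
{
    QNetworkAccessManager manager;

    QNetworkRequest request{url};
    request.setHeader(QNetworkRequest::ContentTypeHeader, "application/json");
    request.setRawHeader("Authorization", "Bearer " + apiKey);

    QNetworkReply* reply = manager.post(request, body);

    // Spin a local event loop until finished() fires - this makes the
    // asynchronous Qt Network call behave like a synchronous request.
    QEventLoop loop;
    QObject::connect(reply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
    loop.exec();

    QByteArray response = reply->error() == QNetworkReply::NoError
        ? reply->readAll()
        : QByteArray{};
    reply->deleteLater();
    return response;
}

Blocking on a local event loop keeps the Qt Network path a drop-in replacement for the synchronous cURL call used on Linux, which is why both backends can share the same downstream error handling and response parsing.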
2 changes: 1 addition & 1 deletion lib/src/mind/ai/llm/openai_wingman.h
@@ -22,7 +22,7 @@
#include <string>

// HTTP client: CURL on Linux, Qt Network on macOS and Win
#ifdef _WIN32
#if defined(_WIN32) || defined(__APPLE__)
#include <QtNetwork>
#else
#include "curl/curl.h"
