diff --git a/Conversation.cpp b/Conversation.cpp
index c70838e..9cdbfee 100644
--- a/Conversation.cpp
+++ b/Conversation.cpp
@@ -1,71 +1,86 @@
 // Conversation.cpp
 #include "Conversation.h"
-
+#include
 #include
 #include
 #include
 #include
-#include
-#include
 #include
+#include
 #include

-Conversation::Conversation(BHandler* replyTo) {
+Conversation::Conversation(BHandler *replyTo) {
  replyTarget = replyTo;
  _apiKey = ReadOpenAIKey();
  printf("key is: %s", _apiKey.String());
-
 }

-Conversation::~Conversation() {
-  }
+Conversation::~Conversation() {}
-
-
-void Conversation::PrintAsJsonArray(const std::vector<std::string>& models) {
-  json output = models; // implicit conversion to JSON array
-  std::cout << output.dump(2) << std::endl; // pretty-print with 2-space indent
+void Conversation::PrintAsJsonArray(const std::vector<std::string> &models) {
+  json output = models;                     // implicit conversion to JSON array
+  std::cout << output.dump(2) << std::endl; // pretty-print with 2-space indent
 }

 void Conversation::sendReply(BMessage message) {
-  BLooper* looper = replyTarget->Looper(); // get the looper it's attached to
+  BLooper *looper = replyTarget->Looper(); // get the looper it's attached to

-  if (looper != nullptr) {
-    BMessenger messenger(replyTarget, looper);
-    messenger.SendMessage(&message);
-  } else {
-    printf("Handler not attached to a looper.\n");
-  }
+  if (looper != nullptr) {
+    BMessenger messenger(replyTarget, looper);
+    messenger.SendMessage(&message);
+  } else {
+    printf("Handler not attached to a looper.\n");
+  }
 }
-
-std::vector<std::string> Conversation::FilterTextModels(const json& modelsJson) {
-  std::vector<std::string> result;
-  std::regex pattern("gpt|text|davinci|curie|babbage|ada");
-
-  for (const auto& model : modelsJson["data"]) {
-    std::string id = model["id"];
-    if (std::regex_search(id, pattern) &&
-        id.find("audio") == std::string::npos &&
-        id.find("vision") == std::string::npos &&
-        id.find("dall-e") == std::string::npos) {
-      result.push_back(id);
-    }
-  }
-
-  return result;
-}
-
-
-void Conversation::MessageReceived(BMessage* message) {
-  switch (message->what) {
-//..    case B_HTTP_DATA_RECEIVED: {
-      //  break;
-      //  }
+std::string Conversation::buildHistoryInfoLine() {
+
+  std::string info = std::to_string(_messageHistory.size()) + " messages";
+  return info;
+}
+
+void Conversation::ClearHistory() {
+
+  printf("Cleared history\n");
+  _messageHistory.clear();
+}
+
+std::vector<std::string>
+Conversation::FilterTextModels(const json &modelsJson) {
+  std::vector<std::string> result;
+  std::regex pattern("gpt|text|curie|babbage|ada");
+
+  for (const auto &model : modelsJson["data"]) {
+    std::string id = model["id"];
+    if (std::regex_search(id, pattern) &&
+        id.find("audio") == std::string::npos &&
+        id.find("image") == std::string::npos &&
+        id.find("tts") == std::string::npos &&
+        id.find("embed") == std::string::npos &&
+        id.find("-20") == std::string::npos &&
+        id.find("preview") == std::string::npos &&
+        id.find("transcribe") == std::string::npos &&
+        id.find("dall-e") == std::string::npos) {
+      result.push_back(id);
+    }
+  }
+
+  std::sort(result.begin(), result.end(), std::greater<>()); // inverse alphabetical to get gpt-4 on top
+  return result;
+}
+
+void Conversation::MessageReceived(BMessage *message) {
+  switch (message->what) {
+    //..
case B_HTTP_DATA_RECEIVED: { + // break; + // } + case UrlEvent::HostNameResolved: { printf("Host name resolved\n"); auto name = message->GetString(UrlEventData::HostName); @@ -94,7 +109,7 @@ void Conversation::MessageReceived(BMessage* message) { //_progress->SetTo(16); //_infoView->SetText("HttpRedirect..."); } break; - + case UrlEvent::RequestCompleted: { printf("RequestCompleted\n"); auto identifier = message->GetInt32(UrlEventData::Id, -1); @@ -102,45 +117,65 @@ void Conversation::MessageReceived(BMessage* message) { // The following call will not block, because we have been notified // that the request is done. BHttpBody body = _lastResult->Body(); - if (body.text.has_value()) - { - //printf("full Reply as text:%s",body.text.value().String()); - json parsed = json::parse(body.text.value().String()); - printf("Parsed..\n"); + if (body.text.has_value()) { + + + try { - std::string objType = parsed["object"]; - printf("Reply of type object :%s\n",objType.c_str()); - - if (objType == "list") - { - //printf("full Reply as text:%s",body.text.value().String()); - - std::vector validModels = FilterTextModels(parsed); - PrintAsJsonArray(validModels); + // printf("full Reply as text:%s",body.text.value().String()); + json parsed = json::parse(body.text.value().String()); + printf("Parsed..\n"); + + std::string objType = parsed["object"]; + printf("Reply of type object :%s\n", objType.c_str()); + + if (objType == "list") { + // printf("full Reply as text:%s",body.text.value().String()); + + std::vector validModels = FilterTextModels(parsed); + PrintAsJsonArray(validModels); + BMessage msg(kModelsReceived); + + for (const auto &model : validModels) { + msg.AddString("model", model.c_str()); + } + sendReply(msg); + + // std::string content = + //parsed["choices"][0]["message"]["content"]; + + } + + else if (objType == "chat.completion") { + std::string content = parsed["choices"][0]["message"]["content"]; -// std::string content = parsed["choices"][0]["message"]["content"]; - - } + _messageHistory.push_back({ + {"role", "assistant"}, + {"content", content} + }); - else - if (objType == "chat.completion") - { - std::string content = parsed["choices"][0]["message"]["content"]; - - // printf("we got content:%s",content.c_str()); - BMessage message(kSendReply); - message.AddString("text", BString(content.c_str())); - sendReply(message); - } - } - else - { - BMessage message(kSendReply); - message.AddString("text", "EMPTY BODY"); + + // printf("we got content:%s",content.c_str()); + BMessage message(kSendReply); + message.AddString("text", BString(content.c_str())); + sendReply(message); + } + + } catch (const std::exception &e) { + fprintf(stderr, "Error parsing JSON: %s\n", e.what()); + std::string content = "Error parsing JSON, wrong model ?"; + BMessage message(kSendReply); + message.AddString("text", BString(content.c_str())); + sendReply(message); + } + + } else { + BMessage message(kSendReply); + message.AddString("text", "EMPTY BODY"); sendReply(message); - } - } + } + } } break; @@ -154,12 +189,12 @@ void Conversation::MessageReceived(BMessage* message) { } break; case UrlEvent::BytesWritten: { - // _infoView->SetText("Some bytes written.."); + // _infoView->SetText("Some bytes written.."); auto identifier = message->GetInt32(UrlEventData::Id, -1); if (_lastResult->Identity() == identifier) { off_t numBytes = message->GetInt64(UrlEventData::NumBytes, 0); off_t totalBytes = message->GetInt64(UrlEventData::TotalBytes, 0); - // _progress->SetTo(numBytes); + // _progress->SetTo(numBytes); 
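      // Sketch (not wired in, kept commented so behavior is unchanged): since
      // Conversation owns no UI, the byte counts could instead be forwarded to
      // the reply target through the same sendReply() path used for answers.
      // kProgressUpdate is a hypothetical constant, not one defined in this diff:
      //   BMessage progressMsg(kProgressUpdate);
      //   progressMsg.AddInt64("bytes", numBytes);
      //   progressMsg.AddInt64("totalBytes", totalBytes);
      //   sendReply(progressMsg);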
//_progress->SetMaxValue(totalBytes); } } break; @@ -174,11 +209,11 @@ void Conversation::MessageReceived(BMessage* message) { //_infoView->SetText("Download Progress.."); } } break; - - default: - BHandler::MessageReceived(message); - break; - } + + default: + BHandler::MessageReceived(message); + break; + } } std::string Conversation::buildBearerKey() { @@ -191,7 +226,6 @@ std::string Conversation::buildBearerKey() { std::string bearer = std::string("Bearer ") + std::string(key); return bearer; - } void Conversation::loadModels() { @@ -199,7 +233,6 @@ void Conversation::loadModels() { auto url = BUrl("https://api.openai.com/v1/models"); BHttpRequest request = BHttpRequest(url); request.SetMethod(BHttpMethod::Get); - BHttpFields fields = BHttpFields(); fields.AddField("Authorization", buildBearerKey()); @@ -213,33 +246,45 @@ void Conversation::loadModels() { if (_lastResult) { printf("Result has identity: %d\n", _lastResult->Identity()); } - - - } -void Conversation::ask(const std::string& prompt) { - -// printf("Asking prompt: %s",prompt.c_str()); +void Conversation::setModel(const std::string &model) { + + _activeModel = model; + + printf("Conversation will use model:%s\n", _activeModel.c_str()); +} + +void Conversation::ask(const std::string &prompt) { + + _messageHistory.push_back({ + {"role", "user"}, + {"content", prompt} + }); + + // printf("Asking prompt: %s",prompt.c_str()); if (_lastResult) _sharedSession.Cancel(_lastResult->Identity()); - auto url = BUrl("https://api.openai.com/v1/chat/completions"); BHttpRequest request = BHttpRequest(url); request.SetMethod(BHttpMethod::Post); - BHttpFields fields = BHttpFields(); fields.AddField("Authorization", buildBearerKey()); // fields.AddField("Content-Type", "application/json"); //NO, this will // crash, we set it in request request.SetFields(fields); - json bodyJson = { - {"model", "gpt-4o"}, - {"messages", {{{"role", "user"}, {"content", prompt}}}}}; +// WE PASS THE WHOLE HISTORY to keep context, as recommended for this stateless API! 
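// For illustration: with one earlier exchange stored in _messageHistory, the
// request body built below would look roughly like this (OpenAI Chat
// Completions format); the prompts and model name are placeholders, not
// values taken from this code:
//   {
//     "model": "gpt-4o-mini",
//     "messages": [
//       {"role": "user", "content": "What is Haiku?"},
//       {"role": "assistant", "content": "An open-source desktop operating system."},
//       {"role": "user", "content": "Who maintains it?"}
//     ]
//   }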
+// json bodyJson = {{"model", _activeModel}, + // {"messages", {{{"role", "user"}, {"content", prompt}}}}}; + + json bodyJson = { + {"model", _activeModel}, + {"messages", _messageHistory} + }; std::string body = bodyJson.dump(); @@ -256,28 +301,22 @@ void Conversation::ask(const std::string& prompt) { } } - - - - BString Conversation::ReadOpenAIKey() { - BPath configPath; if (find_directory(B_USER_SETTINGS_DIRECTORY, &configPath) != B_OK) return "error: couldn't find config directory"; - + // /boot/home/config/openai_key configPath.Append("openai_key"); BFile file(configPath.Path(), B_READ_ONLY); printf("full path:%s\n", configPath.Path()); - if (file.InitCheck() != B_OK) - { - validKey=false; + if (file.InitCheck() != B_OK) { + validKey = false; return "error: couldn't open key file "; - } + } off_t size; file.GetSize(&size); @@ -288,10 +327,8 @@ BString Conversation::ReadOpenAIKey() { BString result(buffer); delete[] buffer; - - validKey=true; + + validKey = true; return result; } - - diff --git a/Conversation.h b/Conversation.h index 4b8162a..3993a13 100644 --- a/Conversation.h +++ b/Conversation.h @@ -39,6 +39,9 @@ static const uint32 kSendReply = 'krpl'; +static const uint32 kModelsReceived = 'mdls'; +static const uint32 kClearHistory = 'clrh'; + #include "include/json.hpp" @@ -61,18 +64,25 @@ public: std::vector FilterTextModels(const json& modelsJson); void ask(const std::string& prompt); + void setModel(const std::string& prompt); void loadModels(); void PrintAsJsonArray(const std::vector& models) ; - + void ClearHistory(); + std::string buildHistoryInfoLine(); + private: - std::string buildBearerKey(); - - + std::string buildBearerKey(); + + std::vector _messageHistory; + void sendReply(BMessage message); BHandler* replyTarget; BString ReadOpenAIKey(); BString _apiKey; BHttpSession _sharedSession = BHttpSession (); std::optional _lastResult; + + std::string _activeModel = "gpt-4o"; + }; \ No newline at end of file diff --git a/DumBer b/DumBer index e04ac0e..286e43c 100755 Binary files a/DumBer and b/DumBer differ diff --git a/MainWindow.cpp b/MainWindow.cpp index c022834..deb25d8 100644 --- a/MainWindow.cpp +++ b/MainWindow.cpp @@ -6,9 +6,11 @@ #include "MainWindow.h" static int progressAnim = 0; - +static int progressColor = 0; +static bool progressColorUp = false; #include +#include #include #include #include @@ -17,10 +19,11 @@ static int progressAnim = 0; #include #include #include - -#include +#include + #include #include +#include #include "Conversation.h" @@ -28,15 +31,13 @@ static int progressAnim = 0; #define B_TRANSLATION_CONTEXT "Window" MainWindow::MainWindow() - : BWindow(BRect(100, 100, 600, 400), B_TRANSLATE("BeDumb"), B_TITLED_WINDOW, + : BWindow(BRect(50, 50, 600, 400), B_TRANSLATE("DumBer"), B_TITLED_WINDOW, B_ASYNCHRONOUS_CONTROLS | B_QUIT_ON_WINDOW_CLOSE) { - - //Without this conversation would never get bmessages from HttpRequest - LockLooper(); - AddHandler(_conversation); - UnlockLooper(); - + // Without this conversation would never get bmessages from HttpRequest + LockLooper(); + AddHandler(_conversation); + UnlockLooper(); BMenuBar *menuBar = _BuildMenu(); @@ -47,18 +48,21 @@ MainWindow::MainWindow() _inputField->MakeEditable(true); _inputField->MakeSelectable(true); _inputField->SetWordWrap(true); - + + _modelMenu = new BPopUpMenu("Models"); + _modelField = new BMenuField("model_field", NULL, _modelMenu); + _modelField->SetEnabled(false); _progress = new BStatusBar("prog"); _progress->SetMaxValue(100); _progress->SetTo(0); 
_progress->SetViewColor(ui_color(B_PANEL_BACKGROUND_COLOR)); - BStringView *header = new BStringView("biglabel", "Let's Be Dumber!"); - BFont font; - header->GetFont(&font); - font.SetSize(20); - header->SetFont(&font); + // BStringView *header = new BStringView("biglabel", "Let's Be Dumber!"); + // BFont font; + // header->GetFont(&font); + // font.SetSize(20); + // header->SetFont(&font); // Info view, only one line high _infoView = new BTextView("info"); @@ -68,115 +72,275 @@ MainWindow::MainWindow() _infoView->MakeSelectable(false); _infoView->SetWordWrap(false); + + _infoConversation = new BTextView("convers"); + _infoConversation->SetText("(No history)"); + _infoConversation->SetViewColor(ui_color(B_PANEL_BACKGROUND_COLOR)); + _infoConversation->MakeEditable(false); + _infoConversation->MakeSelectable(false); + _infoConversation->SetWordWrap(false); + + float lineHeight = _infoView->LineHeight(0); _infoView->SetExplicitMinSize(BSize(B_SIZE_UNSET, lineHeight)); _infoView->SetExplicitMaxSize(BSize(B_SIZE_UNLIMITED, lineHeight)); - float askH = lineHeight * 5; + _infoConversation->SetExplicitMinSize(BSize(B_SIZE_UNSET, lineHeight)); + _infoConversation->SetExplicitMaxSize(BSize(B_SIZE_UNLIMITED, lineHeight)); + + float askH = lineHeight * 2; _inputField->SetExplicitMinSize(BSize(B_SIZE_UNSET, askH)); - BButton *sendButton = + _sendButton = new BButton("send", B_TRANSLATE("Send"), new BMessage(kSendPrompt), B_WILL_DRAW | B_NAVIGABLE); + _sendButton->MakeDefault(true); + _answerView = new BTextView("answer", B_WILL_DRAW | B_FOLLOW_ALL); _answerView->MakeEditable(false); // Disable editing _answerView->MakeSelectable(true); // Enable text selection _answerView->SetWordWrap(true); + _answerView->SetExplicitMinSize(BSize(B_SIZE_UNSET, askH*2)); + BScrollView *scrollView = new BScrollView("scroll_view", _answerView, B_FOLLOW_ALL | B_WILL_DRAW, 0, false, true); // horizontal and vertical scrollbars + + //BView *imageView = new BView("icon_view", B_WILL_DRAW | B_FOLLOW_NONE); + //imageView->SetViewColor(ui_color(B_PANEL_BACKGROUND_COLOR)); + + + BStringView *headerQuestion = new BStringView("questionLabel", "Your question: "); + BStringView *headerAnswer = new BStringView("questionAnswer", "Answer: "); + + BLayoutBuilder::Group<>(this, B_VERTICAL, 0) - - .AddGlue(0.1) - .Add(header) + .AddGlue(0.1) + .Add(headerQuestion) + .AddGroup(B_HORIZONTAL, 0, 1) + .Add(_inputField,0.25) + .AddGroup(B_HORIZONTAL, 0, 0) + .AddGlue() + + .AddGroup(B_VERTICAL, 0, 1) +// .Add(imageView) + + // .AddGlue() + + .AddGroup(B_HORIZONTAL, 0, 1) // left-align cnv + .Add(_infoConversation) + .AddGlue() + .End() + .AddGlue() + + + .AddGroup(B_HORIZONTAL, 0, 1) // left-align _modelField + .Add(_modelField) + .AddGlue() + .End() + + .AddGroup(B_HORIZONTAL, 0, 1) // left-align _sendButton + .Add(_sendButton) + .AddGlue() + .End() + + .End() + .SetInsets(12, 6, 12, 0) - .AddGroup(B_HORIZONTAL, 0, 1) - .Add(_inputField) - .AddGroup(B_HORIZONTAL, 0, 1) - .Add(sendButton) .End() .End() .AddGlue(0.1) - .Add(scrollView) + .Add(headerAnswer) + .Add(scrollView,1) .Add(_progress) .Add(_infoView) - .SetInsets(5, 5, 5, 5) + .SetInsets(6, 6, 6, 6) .End(); + // Just to animate progress + BMessageRunner *runner = new BMessageRunner(this, // target BHandler + new BMessage(kPulse), + 100000 // interval in μs (0 ms) + - - //Just to animate progress - BMessageRunner* runner = new BMessageRunner( - this, // target BHandler - new BMessage(kPulse), - 50000 // interval in μs (0 ms) - ); + ); - PostMessage(kCheckKey); +// BBitmap* image = 
BTranslationUtils::GetBitmapFile("/boot/system/data/icons/hicolor/64x64/apps/kdevelop.png"); + //imageView->SetViewBitmap(image); + +// imageView->SetViewColor(B_TRANSPARENT_COLOR); + + updateHistoryInfo(); + PostMessage(kCheckKey); } - void MainWindow::checkValidKey() { - if (!_conversation->validKey) - { - _infoView->SetText("MISSING API KEY"); - ShowMissingKeyAlertAndQuit(); - return; - } - else - { - _infoView->SetText("API Key loaded."); - _conversation->loadModels(); - } + if (!_conversation->validKey) { + _infoView->SetText("MISSING API KEY"); + ShowMissingKeyAlertAndQuit(); + return; + } else { + _infoView->SetText("API Key loaded."); + waitMode = true; + progressColor = 0; + progressAnim = 1; + + _conversation->loadModels(); + _infoView->SetText("Requesting model lists..."); + } } -void MainWindow::ShowMissingKeyAlertAndQuit() -{ +void MainWindow::ShowMissingKeyAlertAndQuit() { -BAlert* alert = new BAlert("Missing key file!", "Create a file named 'openai_key' containing a valid OpenAI Token on one line in \n\n/boot/home/config/settings/openai_key .\n\nThen relaunch the app.\n\nBe aware that this is not a safe storage so don't use valuable keys.", "Oh, no", "Sigh", "Just give up", B_WIDTH_AS_USUAL,B_WARNING_ALERT); - -alert->SetType(B_INFO_ALERT); - -uint32 result = alert->Go(); -PostMessage(B_QUIT_REQUESTED); + BAlert *alert = new BAlert( + "Missing key file!", + "Create a file named 'openai_key' containing a valid OpenAI Token on one " + "line in \n\n/boot/home/config/settings/openai_key .\n\nThen relaunch " + "the app.\n\nBe aware that this is not a safe storage so don't use " + "valuable keys.", + "Oh, no", "Sigh", "Just give up", B_WIDTH_AS_USUAL, B_WARNING_ALERT); + alert->SetType(B_INFO_ALERT); + uint32 result = alert->Go(); + PostMessage(B_QUIT_REQUESTED); } - MainWindow::~MainWindow() {} +void MainWindow::SelectModelByName(const char *targetLabel) { + BMenu *menu = _modelField->Menu(); + if (!menu) + return; + + for (int32 i = 0; i < menu->CountItems(); ++i) { + BMenuItem *item = menu->ItemAt(i); + + // printf("comparing %s\n", item->Label()); + if (item && strcmp(item->Label(), targetLabel) == 0) { + printf("FOUND %s\n", item->Label()); + + item->SetMarked(true); + PostMessage(item->Message()); + + break; + } + } +} + + +void MainWindow::updateHistoryInfo() { + + _infoConversation->SetText(_conversation->buildHistoryInfoLine().c_str()); + + +} + void MainWindow::MessageReceived(BMessage *message) { - switch (message->what) { + switch (message->what) { - case kCheckKey: { - checkValidKey(); - } - break; + case kCheckKey: { + checkValidKey(); + } break; - case kPulse: { - if (progressAnim >=1 && progressAnim <= 85) { - _progress->SetTo(progressAnim); - progressAnim++; - } - } - break; - - // case kMsgNewFile: { + + + case kClearHistory: { + + printf("will clear history"); + _infoView->SetText("Cleared conversation history. 
Starting new context"); + _inputField->SetText(""); + _answerView->SetText(""); + _conversation->ClearHistory(); + updateHistoryInfo(); + + } break; + + + case kModelSelected: { + + printf("model selected"); + const char *model; + + if (message->FindString("model", &model) == B_OK) { + _infoView->SetText(BString("Model selected: ") << model); + printf("model selected: %s\n", model); + _conversation->setModel(model); + } + + } break; + + case kModelsReceived: { + waitMode = false; + progressAnim = 100; + + _modelMenu->RemoveItems(0, _modelMenu->CountItems(), true); + _infoView->SetText("Models list received."); + + _modelMenu->SetTargetForItems(this); + const char *model; + for (int32 i = 0; message->FindString("model", i, &model) == B_OK; ++i) { + BMessage *modelMsg = new BMessage(kModelSelected); + modelMsg->AddString("model", model); + BMenuItem *item = new BMenuItem(model, modelMsg); + item->SetTarget(this); + _modelMenu->AddItem(item); + } + _modelField->SetEnabled(true); + _sendButton->SetEnabled(true); + + SelectModelByName("gpt-4o-mini"); + + } break; + + case kPulse: { + uint8 r = (uint8)((20 * progressColor) / 255); + uint8 g = (uint8)((128 * progressColor) / 255); + uint8 b = (uint8)((255 * progressColor) / 255); + + rgb_color color = {r, g, b, 255}; + _progress->SetBarColor(color); + + if (waitMode) { + + int step = 8; + + if (progressColorUp) + progressColor += step; + else + progressColor -= step; + + if (progressColor >= 255) { + progressColorUp = false; + progressColor = 255; + } else if (progressColor <= 0) { + progressColorUp = true; + progressColor = 0; + } + + if (progressAnim >= 1 && progressAnim <= 99) { + _progress->SetTo(progressAnim); + progressAnim++; + } + } else + _progress->SetTo(progressAnim); + } break; + + // case kMsgNewFile: { // fSaveMenuItem->SetEnabled(false); // printf("New\n"); // } break; @@ -194,39 +358,46 @@ void MainWindow::MessageReceived(BMessage *message) { _progress->SetMaxValue(100); _progress->SetTo(0); - _answerView->SetText("..."); - progressAnim = 1;//will trigger animation - - + _answerView->SetText("..."); + progressAnim = 1; // will trigger animation + _sendButton->SetEnabled(false); + waitMode = true; + progressColor = 255; printf("Button Pressed\n"); - _infoView->SetText("Asking..."); + _infoView->SetText("Asking..."); _conversation->ask(std::string(_inputField->Text())); } break; case kSendReply: { + + _sendButton->SetEnabled(true); + + waitMode = false; + progressColor = 255; + printf("Conversation returned!\n"); - _infoView->SetText("Answer Received"); + _infoView->SetText("Answer Received"); - progressAnim = 100; + progressAnim = 100; - - const char* text; - if (message->FindString("text", &text) == B_OK) { - // printf("Received text: %s\n", text); - // Do something with text (e.g., set it to a BTextView) - _answerView->SetText(text); - } else { - printf("No text found in message.\n"); - _answerView->SetText("NO TEXT IN REPLY"); - } + const char *text; + if (message->FindString("text", &text) == B_OK) { + // printf("Received text: %s\n", text); + // Do something with text (e.g., set it to a BTextView) + _answerView->SetText(text); + } else { + printf("No text found in message.\n"); + _answerView->SetText("NO TEXT IN REPLY"); + } _progress->SetMaxValue(100); _progress->SetTo(100); - - } break; + + updateHistoryInfo(); + } break; default: { // message->PrintToStream(); @@ -240,7 +411,6 @@ void MainWindow::MessageReceived(BMessage *message) { } // end function - BMenuBar *MainWindow::_BuildMenu() { BMenuBar *menuBar = new 
BMenuBar("menubar"); @@ -272,10 +442,23 @@ BMenuBar *MainWindow::_BuildMenu() { item = new BMenuItem(B_TRANSLATE("Quit"), new BMessage(B_QUIT_REQUESTED), 'Q'); menu->AddItem(item); + menuBar->AddItem(menu); +//------------------------- + + menu = new BMenu(B_TRANSLATE("History")); + + item = new BMenuItem(B_TRANSLATE("Clear History" B_UTF8_ELLIPSIS), + new BMessage(kClearHistory)); + item->SetTarget(this); + menu->AddItem(item); + + + menuBar->AddItem(menu); + + + return menuBar; } - - diff --git a/MainWindow.h b/MainWindow.h index 1928377..02f9d30 100644 --- a/MainWindow.h +++ b/MainWindow.h @@ -12,6 +12,8 @@ #include #include #include +#include +#include #include "Conversation.h" @@ -21,6 +23,7 @@ static const uint32 kCheckKey = 'chkk'; static const uint32 kMsgNewFile = 'fnew'; static const uint32 kMsgOpenFile = 'fopn'; static const uint32 kMsgSaveFile = 'fsav'; +static const uint32 kModelSelected = 'msel'; static const uint32 kPulse = 'plse'; @@ -42,18 +45,25 @@ public: void checkValidKey(); +void updateHistoryInfo(); private: void ShowMissingKeyAlertAndQuit(); + void SelectModelByName(const char* targetLabel); + bool waitMode = false; BMenuBar *_BuildMenu(); BTextView * _answerView; + BTextView * _infoConversation; BTextView * _infoView; BTextView* _inputField; BStatusBar* _progress; - + BMenuField* _modelField; + BPopUpMenu* _modelMenu; + BButton *_sendButton; + // BMenuItem *fSaveMenuItem; }; diff --git a/Makefile b/Makefile index dce2539..795d992 100644 --- a/Makefile +++ b/Makefile @@ -59,7 +59,7 @@ RSRCS = # you need to specify the path to the library and it's name. # (e.g. for mylib.a, specify "mylib.a" or "path/mylib.a") -LIBS = be netservices2 localestub $(STDCPPLIBS) network bnetapi +LIBS = be translation netservices2 localestub $(STDCPPLIBS) network bnetapi # Specify additional paths to directories following the standard libXXX.so # or libXXX.a naming scheme. You can specify full paths or paths relative diff --git a/Resources.rdef b/Resources.rdef index d15a1b6..3523d13 100644 --- a/Resources.rdef +++ b/Resources.rdef @@ -13,8 +13,8 @@ resource app_version { internal = 0, - short_info = "A short description", - long_info = "A little bit longer description of the app." + short_info = "A simple native Haiku client for ChatGPT", + long_info = "A simple native Haiku client for ChatGPT that uses simple text and very little memory. It requires having your own API Key." }; resource vector_icon {