mirror of https://github.com/hyprwm/Hyprland
Make hyprctl thread safe
parent 304b93a4f6
commit 883d389bc2
3 changed files with 63 additions and 13 deletions
@@ -111,6 +111,60 @@ std::string dispatchKeyword(std::string in) {
     return retval;
 }
 
+void HyprCtl::tickHyprCtl() {
+    if (!requestMade)
+        return;
+
+    std::string reply = "";
+
+    try {
+        if (request == "monitors")
+            reply = monitorsRequest();
+        else if (request == "workspaces")
+            reply = workspacesRequest();
+        else if (request == "clients")
+            reply = clientsRequest();
+        else if (request == "activewindow")
+            reply = activeWindowRequest();
+        else if (request == "layers")
+            reply = layersRequest();
+        else if (request.find("dispatch") == 0)
+            reply = dispatchRequest(request);
+        else if (request.find("keyword") == 0)
+            reply = dispatchKeyword(request);
+    } catch (std::exception& e) {
+        Debug::log(ERR, "Error in request: %s", e.what());
+        reply = "Err: " + std::string(e.what());
+    }
+
+    request = reply;
+
+    requestMade = false;
+    requestReady = true;
+}
+
+std::string getRequestFromThread(std::string rq) {
+    while (HyprCtl::request != "" || HyprCtl::requestMade || HyprCtl::requestReady) {
+        std::this_thread::sleep_for(std::chrono::milliseconds(5));
+    }
+
+    HyprCtl::request = rq;
+    HyprCtl::requestMade = true;
+
+    while (!HyprCtl::requestReady) {
+        std::this_thread::sleep_for(std::chrono::milliseconds(5));
+    }
+
+    HyprCtl::requestReady = false;
+    HyprCtl::requestMade = false;
+
+    std::string toReturn = HyprCtl::request;
+
+    HyprCtl::request = "";
+
+    return toReturn;
+}
+
 void HyprCtl::startHyprCtlSocket() {
     std::thread([&]() {
         uint16_t connectPort = 9187;
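Taken together, these two functions form a polled mailbox between threads: getRequestFromThread() (called on the socket thread) waits until the slot is free, publishes the request, raises requestMade, and then spins until the main thread's tickHyprCtl() has swapped the request string for a reply and raised requestReady. Below is a self-contained sketch of that handoff, as an illustration rather than Hyprland's actual code; it uses std::atomic<bool> for the two flags (the commit uses plain bools, which is formally a data race, though the 5 ms sleep loops make it behave in practice):

    // Minimal sketch of the two-flag request handoff (illustration only).
    // Assumption: std::atomic<bool> flags, so the cross-thread ordering is
    // well-defined; the commit itself uses plain bools.
    #include <atomic>
    #include <chrono>
    #include <iostream>
    #include <string>
    #include <thread>

    std::atomic<bool> requestMade{false};
    std::atomic<bool> requestReady{false};
    std::string       request; // only touched when the flags permit

    // Main-thread side: answers at most one pending request per call.
    void tick() {
        if (!requestMade)
            return;
        request      = "reply to: " + request; // stand-in for monitorsRequest() etc.
        requestMade  = false;
        requestReady = true;
    }

    // Socket-thread side: publish a request, spin until the reply appears.
    std::string getRequestFromThread(std::string rq) {
        while (!request.empty() || requestMade || requestReady)
            std::this_thread::sleep_for(std::chrono::milliseconds(5));

        request     = std::move(rq);
        requestMade = true;

        while (!requestReady)
            std::this_thread::sleep_for(std::chrono::milliseconds(5));

        requestReady = false;
        std::string toReturn = request;
        request.clear();
        return toReturn;
    }

    int main() {
        std::thread mainLoop([] { // stand-in for the per-frame tick below
            for (int frame = 0; frame < 120; ++frame) {
                tick();
                std::this_thread::sleep_for(std::chrono::milliseconds(16));
            }
        });
        std::cout << getRequestFromThread("monitors") << std::endl;
        mainLoop.join();
    }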
@@ -164,19 +218,7 @@ void HyprCtl::startHyprCtlSocket() {
 
         std::string request(readBuffer);
 
-        std::string reply = "";
-        try {
-            if (request == "monitors") reply = monitorsRequest();
-            else if (request == "workspaces") reply = workspacesRequest();
-            else if (request == "clients") reply = clientsRequest();
-            else if (request == "activewindow") reply = activeWindowRequest();
-            else if (request == "layers") reply = layersRequest();
-            else if (request.find("dispatch") == 0) reply = dispatchRequest(request);
-            else if (request.find("keyword") == 0) reply = dispatchKeyword(request);
-        } catch (std::exception& e) {
-            Debug::log(ERR, "Error in request: %s", e.what());
-            reply = "Err: " + std::string(e.what());
-        }
+        std::string reply = getRequestFromThread(request);
 
         write(ACCEPTEDCONNECTION, reply.c_str(), reply.length());
 
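The socket thread thus no longer computes replies itself; it just parks the request in the shared slot and blocks until the main thread has answered. From a client's point of view nothing changes: connect, write a request string, read the reply. A hypothetical client sketch, assuming the listener set up in startHyprCtlSocket() is a TCP socket on localhost at the connectPort (9187) shown above (the bind details are outside this hunk):

    // Hypothetical hyprctl client (sketch): one request, one reply.
    // Assumes a TCP listener on 127.0.0.1:9187; only the port number
    // appears in the diff, the bind address is an assumption.
    #include <arpa/inet.h>
    #include <sys/socket.h>
    #include <unistd.h>
    #include <cstdio>
    #include <cstring>

    int main() {
        const int fd = socket(AF_INET, SOCK_STREAM, 0);
        if (fd < 0)
            return 1;

        sockaddr_in addr{};
        addr.sin_family = AF_INET;
        addr.sin_port   = htons(9187); // connectPort in the hunk above
        inet_pton(AF_INET, "127.0.0.1", &addr.sin_addr);

        if (connect(fd, (sockaddr*)&addr, sizeof(addr)) < 0)
            return 1;

        const char* request = "monitors"; // or "dispatch ...", "keyword ...", etc.
        write(fd, request, strlen(request));

        char buffer[8192] = {0};
        const ssize_t len = read(fd, buffer, sizeof(buffer) - 1);
        if (len > 0)
            printf("%.*s\n", (int)len, buffer);

        close(fd);
        return 0;
    }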
@@ -8,5 +8,10 @@ namespace HyprCtl {
     void startHyprCtlSocket();
+    void tickHyprCtl();
+
+    // very simple thread-safe request method
+    inline bool requestMade = false;
+    inline bool requestReady = false;
+    inline std::string request = "";
 
     inline std::ifstream requestStream;
 };
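Because these are C++17 inline variables, the header can both declare and define them: every translation unit that includes HyprCtl.hpp sees the same single requestMade/requestReady/request objects, which is what lets the socket code and the frame handler share one mailbox without a separate definition file. For comparison, the pre-C++17 spelling of the same thing (illustration only):

    // Pre-C++17 equivalent of the inline variables (illustration only):
    // the header declares, exactly one .cpp provides the definition.

    // HyprCtl.hpp
    #include <string>
    namespace HyprCtl {
        extern bool        requestMade;
        extern bool        requestReady;
        extern std::string request;
    };

    // HyprCtl.cpp
    namespace HyprCtl {
        bool        requestMade  = false;
        bool        requestReady = false;
        std::string request      = "";
    };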
@@ -3,6 +3,7 @@
 #include "../managers/InputManager.hpp"
 #include "../render/Renderer.hpp"
 #include "Events.hpp"
+#include "../debug/HyprCtl.hpp"
 
 // --------------------------------------------------------- //
 //  __  __  ____  _   _ _____ _______ ____  _____   _____    //
@@ -133,6 +134,8 @@ void Events::listener_monitorFrame(void* owner, void* data) {
     g_pAnimationManager->tick();
     g_pCompositor->cleanupWindows();
 
+    HyprCtl::tickHyprCtl(); // so that we dont get that race condition multithread bullshit
+
     g_pConfigManager->dispatchExecOnce(); // We exec-once when at least one monitor starts refreshing, meaning stuff has init'd
 
     if (g_pConfigManager->m_bWantsMonitorReload)
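Hooking the tick into listener_monitorFrame means requests are serviced only while at least one monitor is rendering, at most one per frame, and always on the compositor's main thread, which is the point of the change. With the 5 ms polls in getRequestFromThread, a reply on a 60 Hz monitor should arrive within roughly one frame interval (about 16.7 ms) plus a poll or two, i.e. on the order of 20-25 ms in the worst case.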