server: Introduce LLAMA_BUILD_WEBUI build flag to allow disabling the embedded web ui (#20158)

* introduce LLAMA_SERVER_NO_WEBUI

* LLAMA_SERVER_NO_WEBUI → LLAMA_BUILD_WEBUI

* LLAMA_BUILD_WEBUI ON by default not based on LLAMA_STANDALONE

* Missed this

* Add useWebUi to package.nix
This commit is contained in:
Kusha Gharahi 2026-03-27 11:25:55 -05:00 committed by GitHub
parent ee051c1e4e
commit ff934e29bc
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 32 additions and 15 deletions

View file

@ -8,9 +8,11 @@
#include <string>
#include <thread>
#ifdef LLAMA_BUILD_WEBUI
// auto generated files (see README.md for details)
#include "index.html.gz.hpp"
#include "loading.html.hpp"
#endif
//
// HTTP implementation using cpp-httplib
@ -181,11 +183,14 @@ bool server_http_context::init(const common_params & params) {
auto middleware_server_state = [this](const httplib::Request & req, httplib::Response & res) {
bool ready = is_ready.load();
if (!ready) {
#ifdef LLAMA_BUILD_WEBUI
auto tmp = string_split<std::string>(req.path, '.');
if (req.path == "/" || tmp.back() == "html") {
res.status = 503;
res.set_content(reinterpret_cast<const char*>(loading_html), loading_html_len, "text/html; charset=utf-8");
} else {
} else
#endif
{
// no endpoint is allowed to be accessed when the server is not ready
// this is to prevent any data races or inconsistent states
res.status = 503;
@ -255,6 +260,7 @@ bool server_http_context::init(const common_params & params) {
return 1;
}
} else {
#ifdef LLAMA_BUILD_WEBUI
// using embedded static index.html
srv->Get(params.api_prefix + "/", [](const httplib::Request & req, httplib::Response & res) {
if (req.get_header_value("Accept-Encoding").find("gzip") == std::string::npos) {
@ -268,6 +274,7 @@ bool server_http_context::init(const common_params & params) {
}
return false;
});
#endif
}
}
return true;