Fix Anthropic Messages API (#1136)

* server: stop processing the prompt when client disconnects

implement generator-based API for task results

Update httplib.h to 0.27.0

Fix embedding error

Stop prompt processing when disconnected

* Port upstream https://github.com/ggml-org/llama.cpp/pull/18551

* add back anthropic

* Fix merge issue caused by github webui

---------

Co-authored-by: firecoperana <firecoperana>
This commit is contained in:
hksdpc255
2026-01-13 17:37:29 +11:00
committed by GitHub
parent 013831bba5
commit e1c4c4a495
6 changed files with 165 additions and 28 deletions

View File

@@ -506,15 +506,29 @@ bool server_sent_event(httplib::DataSink& sink, const json& data) {
}
// Write one or more Anthropic-style server-sent events to the HTTP sink.
//
// @param sink  streaming response sink; write() returns false once the
//              client has disconnected.
// @param data  either a single event object or a JSON array of event
//              objects to be sent in order.
// @return true if every event was written, false on the first failed write.
bool server_sent_anthropic_event(httplib::DataSink& sink, const json& data) {
    // Serialize a single event object into SSE wire format and push it.
    // When the object carries both "event" and "data" fields, emit a named
    // event ("event: <name>\n" line first); otherwise emit a bare "data:"
    // record. dump(..., error_handler_t::replace) substitutes invalid UTF-8
    // instead of throwing mid-stream.
    static auto send_single = [](httplib::DataSink& sink, const json& data) -> bool {
        const std::string str =
            (data.contains("event") && data.contains("data")) ?
            ("event: " + data.at("event").get<std::string>() + "\n" +
             "data: " + data.at("data").dump(-1, ' ', false, json::error_handler_t::replace) + "\n\n") :
            ("data: " + data.at("data").dump(-1, ' ', false, json::error_handler_t::replace) + "\n\n");
        LOG_DBG("data stream, to_send: %s", str.c_str());
        return sink.write(str.c_str(), str.size());
    };
    // An array means a batch of events: send each in order and stop at the
    // first write failure (client gone) so we don't keep serializing.
    if (data.is_array()) {
        for (const auto& item : data) {
            if (!send_single(sink, item)) {
                return false;
            }
        }
        return true;
    }
    return send_single(sink, data);
}
//