Mirror of https://github.com/ikawrakow/ik_llama.cpp.git
Revert 2
@@ -1138,7 +1138,7 @@ void server_context::kv_cache_clear() {
     LOG_VERBOSE("clearing KV cache", {});

     // clear the entire KV cache
-    llama_memory_clear(ctx);
+    llama_kv_cache_clear(ctx);
     clean_kv_cache = false;
 }
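For reference, a minimal sketch of the call this revert restores, assuming ik_llama.cpp keeps the pre-rename llama.cpp C API in which llama_kv_cache_clear() takes the context directly; the helper function below is hypothetical and not part of the commit:

#include "llama.h"

// Hypothetical helper, for illustration only: drops every cached
// key/value entry so the next decode starts from an empty cache.
static void reset_kv_cache(llama_context * ctx) {
    llama_kv_cache_clear(ctx);
}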