Mirror of https://github.com/ikawrakow/ik_llama.cpp.git, synced 2026-01-26 17:20:01 +00:00
add hunyuan moe support for 561 (#565)
* add hunyuan moe
* Don't reshape Vcur
* Apply chat template fix from mainline PR14584
@@ -111,6 +111,7 @@ extern "C" {
     LLAMA_VOCAB_PRE_TYPE_FALCON_3   = 34,
     LLAMA_VOCAB_PRE_TYPE_FALCON_E   = 35,
     LLAMA_VOCAB_PRE_TYPE_SEED_CODER = 36, //llama.cpp lists this as 35
+    LLAMA_VOCAB_PRE_TYPE_HUNYUAN    = 37, //llama.cpp lists this as 36
 };

 // note: these values should be synchronized with ggml_rope
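For context, a new LLAMA_VOCAB_PRE_TYPE_* value is normally wired up by matching the pre-tokenizer name string read from the model's GGUF metadata ("tokenizer.ggml.pre") against a known name during vocab loading. The sketch below is a minimal, self-contained illustration of that pattern only; the enum subset, the pre_type_from_name helper, and the specific name strings (e.g. "hunyuan") are assumptions for illustration, not the repository's actual loader code.

// Minimal sketch (assumed, not the repository's actual code): map a
// pre-tokenizer name string to a LLAMA_VOCAB_PRE_TYPE_* enum value.
#include <cstdio>
#include <string>

enum llama_vocab_pre_type {
    LLAMA_VOCAB_PRE_TYPE_DEFAULT    = 0,
    LLAMA_VOCAB_PRE_TYPE_FALCON_3   = 34,
    LLAMA_VOCAB_PRE_TYPE_FALCON_E   = 35,
    LLAMA_VOCAB_PRE_TYPE_SEED_CODER = 36,
    LLAMA_VOCAB_PRE_TYPE_HUNYUAN    = 37,
};

// hypothetical helper: resolve a "tokenizer.ggml.pre" string to an enum value
static llama_vocab_pre_type pre_type_from_name(const std::string & name) {
    if (name == "falcon3")    return LLAMA_VOCAB_PRE_TYPE_FALCON_3;
    if (name == "falcon-e")   return LLAMA_VOCAB_PRE_TYPE_FALCON_E;
    if (name == "seed-coder") return LLAMA_VOCAB_PRE_TYPE_SEED_CODER;
    if (name == "hunyuan")    return LLAMA_VOCAB_PRE_TYPE_HUNYUAN;
    // unrecognized names fall back to the default pre-tokenizer
    return LLAMA_VOCAB_PRE_TYPE_DEFAULT;
}

int main() {
    printf("hunyuan -> %d\n", (int) pre_type_from_name("hunyuan")); // prints 37
    return 0;
}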