Disable some fusion, RoPE cache off by default (#894)

* Disable some fusion and make RoPE cache off by default

* Minor

---------

Co-authored-by: Iwan Kawrakow <iwan.kawrakow@gmail.com>
This commit is contained in:
Kawrakow
2025-11-04 07:50:14 +02:00
committed by GitHub
parent fb0d5a995c
commit c23fda2103
4 changed files with 7 additions and 11 deletions

View File

@@ -249,7 +249,7 @@ struct gpt_params {
bool fused_up_gate = true; // fused up*unary(gate) op
bool fused_mmad = true; // fused mul+multi_add op
bool grouped_expert_routing = false; // if to use grouped expert routing (BailingMoeV2 arch)
bool rope_cache = true; // if to use RoPE cache (for supported models)
bool rope_cache = false; // if to use RoPE cache (for supported models)
int min_experts = -1;
float thresh_experts = 0;