From 115f0a36700e4ba296ef7331a595d175ef48b317 Mon Sep 17 00:00:00 2001
From: Jaret Burkett
Date: Fri, 6 Feb 2026 14:26:53 -0700
Subject: [PATCH] Fixed error with wan models when caching text embeddings

---
 toolkit/models/wan21/wan21.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/toolkit/models/wan21/wan21.py b/toolkit/models/wan21/wan21.py
index 998b2312..4932b51d 100644
--- a/toolkit/models/wan21/wan21.py
+++ b/toolkit/models/wan21/wan21.py
@@ -654,10 +654,10 @@ class Wan21(BaseModel):
         return latents.to(device, dtype=dtype)
 
     def get_model_has_grad(self):
-        return self.model.proj_out.weight.requires_grad
+        return False
 
     def get_te_has_grad(self):
-        return self.text_encoder.encoder.block[0].layer[0].SelfAttention.q.weight.requires_grad
+        return False
 
     def save_model(self, output_path, meta, save_dtype):
         # only save the unet
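
Context for the change (not part of the patch itself): a minimal sketch of
the failure mode this commit appears to fix, assuming the trainer unloads
the text encoder once text embeddings are cached, so the long attribute
chains in the old get_model_has_grad/get_te_has_grad getters no longer
resolve. The class name below is a hypothetical stand-in, not the
repository's actual API.

    class Wan21Sketch:
        """Hypothetical stand-in for Wan21; not the real class."""

        def __init__(self):
            # Assumption: after caching text embeddings, the trainer drops
            # the text encoder, leaving the attribute as None.
            self.text_encoder = None

        def get_te_has_grad_old(self):
            # Pre-patch: dereferencing the unloaded encoder raises
            # AttributeError ("'NoneType' object has no attribute 'encoder'").
            return (self.text_encoder.encoder.block[0].layer[0]
                    .SelfAttention.q.weight.requires_grad)

        def get_te_has_grad_new(self):
            # Post-patch: report a constant; presumably the base weights are
            # never trained directly here, so False holds even when the
            # encoder has been unloaded.
            return False


    model = Wan21Sketch()
    try:
        model.get_te_has_grad_old()
    except AttributeError as exc:
        print(f"pre-patch getter fails with cached embeddings: {exc}")
    print("post-patch getter:", model.get_te_has_grad_new())

Returning a constant trades a live requires_grad check for robustness: the
getters stay callable regardless of which submodules are currently loaded.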