From 6fffadfc0e926d443ed949e42952aaf389bfddad Mon Sep 17 00:00:00 2001
From: Jaret Burkett
Date: Sat, 16 Aug 2025 18:07:21 -0600
Subject: [PATCH] Fixed a bug that prevented training just one stage of Wan 2.2 14b

---
 extensions_built_in/diffusion_models/wan22/wan22_14b_model.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/extensions_built_in/diffusion_models/wan22/wan22_14b_model.py b/extensions_built_in/diffusion_models/wan22/wan22_14b_model.py
index a1b75d49..de4c7c59 100644
--- a/extensions_built_in/diffusion_models/wan22/wan22_14b_model.py
+++ b/extensions_built_in/diffusion_models/wan22/wan22_14b_model.py
@@ -204,6 +204,10 @@ class Wan2214bModel(Wan225bModel):
             raise ValueError(
                 "At least one of train_high_noise or train_low_noise must be True in model.model_kwargs"
             )
+
+        # if we are only training one or the other, the target LoRA modules will be the Wan transformer class
+        if not self.train_high_noise or not self.train_low_noise:
+            self.target_lora_modules = ["WanTransformer3DModel"]
 
     @property
     def max_step_saves_to_keep_multiplier(self):
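
Note: the snippet below is a minimal, self-contained sketch of the guard this patch introduces, not code from the repository. The class name Wan2214bModel, the train_high_noise / train_low_noise flags, and the WanTransformer3DModel target come from the diff; the simplified constructor shape and the default module name are assumptions for illustration only.

# Sketch of the fixed behavior under the assumptions stated above.
class Wan2214bModel:
    def __init__(self, train_high_noise: bool = True, train_low_noise: bool = True):
        if not train_high_noise and not train_low_noise:
            raise ValueError(
                "At least one of train_high_noise or train_low_noise must be True in model.model_kwargs"
            )
        self.train_high_noise = train_high_noise
        self.train_low_noise = train_low_noise
        # Default targeting when both stages train (placeholder name, not from the patch).
        self.target_lora_modules = ["Wan22TwoStageWrapper"]
        # The fix: when only one stage is trained, point the LoRA adapter at the
        # single WanTransformer3DModel instead of the two-stage wrapper.
        if not self.train_high_noise or not self.train_low_noise:
            self.target_lora_modules = ["WanTransformer3DModel"]

# Example: training only the low-noise stage now resolves to the transformer class.
model = Wan2214bModel(train_high_noise=False, train_low_noise=True)
print(model.target_lora_modules)  # ['WanTransformer3DModel']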