Fix new bug I accidentally introduced with LoRA

Author: Jaret Burkett
Date: 2025-04-13 21:15:07 -06:00
parent ca3ce0f34c
commit 1e0bff653c


@@ -341,7 +341,9 @@ class LoRASpecialNetwork(ToolkitNetworkMixin, LoRANetwork):
                 skip = True
             if self.transformer_only and is_unet:
-                transformer_block_names = base_model.get_transformer_block_names()
+                transformer_block_names = None
+                if base_model is not None:
+                    transformer_block_names = base_model.get_transformer_block_names()
                 if transformer_block_names is not None:
                     if not any([name in lora_name for name in transformer_block_names]):
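
The removed line called base_model.get_transformer_block_names() unconditionally, which raises an AttributeError whenever base_model is None; the added guard only queries the model when it exists. Below is a minimal runnable sketch of the fixed check, isolated from the class for illustration: the function should_skip_module and the _FakeModel helper are hypothetical names invented here, while transformer_only, is_unet, base_model, get_transformer_block_names, and lora_name come from the diff above.

# Hedged sketch of the guarded lookup from the diff; should_skip_module and
# _FakeModel are hypothetical names used only for this illustration.

def should_skip_module(lora_name, base_model, transformer_only, is_unet):
    """Return True when a transformer-only LoRA should skip this module."""
    if transformer_only and is_unet:
        # Guard introduced by this commit: base_model may be None here,
        # so only ask it for transformer block names when it is present.
        transformer_block_names = None
        if base_model is not None:
            transformer_block_names = base_model.get_transformer_block_names()
        if transformer_block_names is not None:
            # Skip any module whose name matches no transformer block.
            if not any(name in lora_name for name in transformer_block_names):
                return True
    return False

class _FakeModel:
    def get_transformer_block_names(self):
        return ["transformer_blocks"]

# Matching transformer module: not skipped.
assert should_skip_module("lora_unet_transformer_blocks_0", _FakeModel(), True, True) is False
# Non-transformer module: skipped.
assert should_skip_module("lora_unet_conv_in", _FakeModel(), True, True) is True
# The old code crashed on this call; with the guard, a missing base_model
# simply falls through and the module is not skipped.
assert should_skip_module("lora_unet_conv_in", None, True, True) is False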