From 1e0bff653ccf8e7fc5520a86141cacd6035cdb77 Mon Sep 17 00:00:00 2001
From: Jaret Burkett
Date: Sun, 13 Apr 2025 21:15:07 -0600
Subject: [PATCH] Fix new bug I accidentally introduced with lora

---
 toolkit/lora_special.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/toolkit/lora_special.py b/toolkit/lora_special.py
index e213face..03bbc949 100644
--- a/toolkit/lora_special.py
+++ b/toolkit/lora_special.py
@@ -341,7 +341,9 @@ class LoRASpecialNetwork(ToolkitNetworkMixin, LoRANetwork):
                     skip = True
 
                 if self.transformer_only and is_unet:
-                    transformer_block_names = base_model.get_transformer_block_names()
+                    transformer_block_names = None
+                    if base_model is not None:
+                        transformer_block_names = base_model.get_transformer_block_names()
                     if transformer_block_names is not None:
                         if not any([name in lora_name for name in transformer_block_names]):
                             skip = True
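
The failure mode being fixed: this code path can be reached with base_model set to None, and the
earlier change called base_model.get_transformer_block_names() unconditionally, which raises
AttributeError in that case. Below is a minimal, self-contained sketch of the guarded lookup the
patch installs. BaseModel, should_skip, and the block-name strings are hypothetical stand-ins
chosen for illustration, not the toolkit's actual API.

    from typing import List, Optional


    class BaseModel:
        """Hypothetical stand-in for the toolkit's base model wrapper."""

        def get_transformer_block_names(self) -> Optional[List[str]]:
            # Illustrative values only; real models report their own block names.
            return ["transformer_blocks", "single_transformer_blocks"]


    def should_skip(lora_name: str, transformer_only: bool, is_unet: bool,
                    base_model: Optional[BaseModel]) -> bool:
        """Mirrors the patched logic: only consult the base model when it exists."""
        skip = False
        if transformer_only and is_unet:
            # The fix: default to None and call get_transformer_block_names()
            # only when base_model is present, instead of dereferencing it blindly.
            transformer_block_names = None
            if base_model is not None:
                transformer_block_names = base_model.get_transformer_block_names()
            if transformer_block_names is not None:
                if not any(name in lora_name for name in transformer_block_names):
                    skip = True
        return skip


    # base_model=None no longer raises AttributeError; the module is simply kept.
    assert should_skip("unet_conv_in", True, True, None) is False
    # With a model present, non-transformer modules are skipped as before.
    assert should_skip("unet_conv_in", True, True, BaseModel()) is True
    assert should_skip("transformer_blocks_0_attn", True, True, BaseModel()) is False

The design choice matches the diff: when the base model is unavailable, transformer_block_names
stays None and the transformer_only filter is a no-op, so module creation proceeds rather than
crashing.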