From 4f896c0d8a03e36b2ab89387c865127cc51fb499 Mon Sep 17 00:00:00 2001
From: Jaret Burkett
Date: Sat, 17 May 2025 19:37:55 +0000
Subject: [PATCH] Fixed issue where sampling fails if doing a full finetune for some models

---
 toolkit/models/base_model.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/toolkit/models/base_model.py b/toolkit/models/base_model.py
index 27a94503..17550dde 100644
--- a/toolkit/models/base_model.py
+++ b/toolkit/models/base_model.py
@@ -343,7 +343,7 @@ class BaseModel:
         pipeline: Union[None, StableDiffusionPipeline, StableDiffusionXLPipeline] = None,
     ):
-        network = unwrap_model(self.network)
+        network = self.network
         merge_multiplier = 1.0
         flush()
         # if using assistant, unfuse it
@@ -364,6 +364,7 @@ class BaseModel:
             self.assistant_lora.force_to(self.device_torch, self.torch_dtype)
 
         if network is not None:
+            network = unwrap_model(self.network)
             network.eval()
             # check if we have the same network weight for all samples. If we do, we can merge in th
             # the network to drastically speed up inference
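
Note for reviewers: below is a minimal, self-contained sketch of the reordering this patch makes, not the project's actual code. The names unwrap_model (as written here), SamplingSketch, and _DummyNetwork are hypothetical stand-ins; the real toolkit helper may behave differently. The sketch only illustrates that during a full finetune self.network is None, so the unwrap call has to wait until after the None check rather than run unconditionally.

    # Hypothetical stand-in for an unwrap helper that assumes a non-None model.
    def unwrap_model(model):
        if model is None:
            raise ValueError("unwrap_model expects a model, got None")
        return getattr(model, "module", model)


    class _DummyNetwork:
        # Minimal object standing in for an adapter network during sampling.
        def eval(self):
            pass


    class SamplingSketch:
        """Illustrative only: mirrors the control flow touched by the patch."""

        def __init__(self, network=None):
            # In a full finetune there is no adapter network, so this stays None.
            self.network = network

        def sample(self):
            # After the patch: take the raw reference first...
            network = self.network

            # ...and only unwrap once we know a network actually exists.
            if network is not None:
                network = unwrap_model(self.network)
                network.eval()

            # Sampling proceeds either way; a None network is simply skipped.
            return "sampled"


    # Both cases run without error after the reordering.
    SamplingSketch(network=None).sample()
    SamplingSketch(network=_DummyNetwork()).sample()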