From 4f64f6daa4582c8b5ddd5ccdb96a82fe86eaa91b Mon Sep 17 00:00:00 2001
From: cluder <1590330+cluder@users.noreply.github.com>
Date: Sun, 1 Sep 2024 02:09:06 +0200
Subject: [PATCH] Fix model loading during Checkpoint Merging #1359,#1095 (#1639)

* Fix Checkpoint Merging #1359, #1095

- checkpoint_list[] contains CheckpointInfo.title, which has the form
  "checkpointname.safetensors [hash]". When a checkpoint is selected to be
  loaded during a merge, we tried to match it against just
  "checkpointname.safetensors", so the lookup failed.
  -> use checkpoint_aliases[], which already contains the checkpoint key in
  all possible variants.
- replaced the removed sd_models.read_state_dict() with
  sd_models.load_torch_file()
- replaced the removed sd_vae.load_vae_dict() with sd_vae.load_torch_file()
- commented out create_config() for now, since it calls a removed method:
  sd_models_config.find_checkpoint_config_near_filename()

* Follow-up merge fix for #1359 #1095

- read_state_dict() does nothing; replaced 2 occurrences with
  load_torch_file()
- now merging actually merges again

---
 modules/extras.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/modules/extras.py b/modules/extras.py
index 6f05b0fe..306c5609 100644
--- a/modules/extras.py
+++ b/modules/extras.py
@@ -150,14 +150,14 @@ def run_modelmerger(id_task, primary_model_name, secondary_model_name, tertiary_
     if theta_func2:
         shared.state.textinfo = "Loading B"
         print(f"Loading {secondary_model_info.filename}...")
-        theta_1 = sd_models.read_state_dict(secondary_model_info.filename, map_location='cpu')
+        theta_1 = sd_models.load_torch_file(secondary_model_info.filename)
     else:
         theta_1 = None

     if theta_func1:
         shared.state.textinfo = "Loading C"
         print(f"Loading {tertiary_model_info.filename}...")
-        theta_2 = sd_models.read_state_dict(tertiary_model_info.filename, map_location='cpu')
+        theta_2 = sd_models.load_torch_file(tertiary_model_info.filename)

     shared.state.textinfo = 'Merging B and C'
     shared.state.sampling_steps = len(theta_1.keys())
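
To make the first bullet concrete: the merge UI hands back the checkpoint's
title, "checkpointname.safetensors [hash]", while the old lookup compared it
against the bare filename. Below is a minimal sketch of an alias-based lookup;
CheckpointInfo and checkpoint_aliases here are simplified stand-ins for the
structures in modules/sd_models.py, whose real fields and registration code
may differ.

# Minimal sketch, not the repository's actual code.
class CheckpointInfo:
    def __init__(self, filename, shorthash):
        self.filename = filename                  # "checkpointname.safetensors"
        self.shorthash = shorthash                # "hash"
        self.title = f"{filename} [{shorthash}]"  # what the UI dropdown shows

    def all_aliases(self):
        # every spelling under which this checkpoint may be requested
        return [self.title, self.filename, self.shorthash]


checkpoint_aliases = {}  # alias -> CheckpointInfo, one entry per variant


def register(info):
    for alias in info.all_aliases():
        checkpoint_aliases[alias] = info


def resolve(name):
    # Before the fix, the merge code compared "checkpointname.safetensors"
    # against titles of the form "checkpointname.safetensors [hash]" and never
    # matched. Looking the name up in checkpoint_aliases matches either form.
    return checkpoint_aliases.get(name)


register(CheckpointInfo("checkpointname.safetensors", "abc123"))
assert resolve("checkpointname.safetensors") is not None
assert resolve("checkpointname.safetensors [abc123]") is not None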
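
The two hunks replace read_state_dict() with load_torch_file(). As a rough
sketch of what such a loader typically does (not this repository's actual
implementation), a hypothetical load_state_dict() helper could read either a
.safetensors file or a pickled .ckpt onto the CPU:

# Hedged sketch only; the real load_torch_file() may handle more formats,
# devices, and error cases.
import torch
import safetensors.torch


def load_state_dict(filename, device="cpu"):
    if filename.endswith(".safetensors"):
        # safetensors files are loaded without unpickling
        return safetensors.torch.load_file(filename, device=device)
    # legacy .ckpt files are pickled torch checkpoints
    data = torch.load(filename, map_location=device)
    # some checkpoints nest the weights under a "state_dict" key
    if isinstance(data, dict) and "state_dict" in data:
        return data["state_dict"]
    return data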