From f40930c55bad25ba823aad3eb9c710847688212d Mon Sep 17 00:00:00 2001
From: layerdiffusion <19834515+lllyasviel@users.noreply.github.com>
Date: Sun, 8 Sep 2024 17:24:53 -0700
Subject: [PATCH] fix

---
 backend/patcher/lora.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/backend/patcher/lora.py b/backend/patcher/lora.py
index 2c5de045..42f379bb 100644
--- a/backend/patcher/lora.py
+++ b/backend/patcher/lora.py
@@ -31,11 +31,11 @@ def model_lora_keys_clip(model, key_map={}):
     for model_key in model_keys:
         if model_key.endswith(".weight"):
             if model_key.startswith("t5xxl.transformer."):
-                # Flux OneTrainer T5
-                formatted = inner_str(model_key, "t5xxl.transformer.", ".weight")
-                formatted = formatted.replace(".", "_")
-                formatted = f"lora_te2_{formatted}"
-                key_map[formatted] = model_key
+                for prefix in ['te1', 'te2', 'te3']:
+                    formatted = inner_str(model_key, "t5xxl.transformer.", ".weight")
+                    formatted = formatted.replace(".", "_")
+                    formatted = f"lora_{prefix}_{formatted}"
+                    key_map[formatted] = model_key
 
     return key_map
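Note on the change: before this patch, T5-XXL weights were mapped only under the `lora_te2_` prefix (the removed comment points at Flux OneTrainer LoRAs). The fix registers each T5-XXL model key under all three text-encoder prefixes `te1`/`te2`/`te3`, presumably because the index of the T5 encoder differs across model families and trainer naming conventions. Below is a minimal, self-contained sketch of the resulting mapping; `inner_str` is not shown in this patch, so the one-line version here is an assumption inferred from how it is called, and `map_t5xxl_key` is a hypothetical wrapper for illustration only.

```python
# Minimal sketch of the key mapping after this patch. The real code lives in
# backend/patcher/lora.py; `inner_str` is ASSUMED (not shown in the patch)
# to return the substring between a given prefix and suffix.

def inner_str(s: str, prefix: str, suffix: str) -> str:
    # Assumed helper, inferred from its call site in the diff.
    return s[len(prefix):-len(suffix)]

def map_t5xxl_key(model_key: str, key_map: dict) -> dict:
    # Hypothetical wrapper around the changed hunk: after the fix, one
    # T5-XXL model key is registered under all three text-encoder
    # prefixes instead of only 'te2'.
    if model_key.startswith("t5xxl.transformer.") and model_key.endswith(".weight"):
        for prefix in ['te1', 'te2', 'te3']:
            formatted = inner_str(model_key, "t5xxl.transformer.", ".weight")
            formatted = formatted.replace(".", "_")
            key_map[f"lora_{prefix}_{formatted}"] = model_key
    return key_map

demo = map_t5xxl_key(
    "t5xxl.transformer.encoder.block.0.layer.0.SelfAttention.q.weight", {})
for k in demo:
    print(k)
# lora_te1_encoder_block_0_layer_0_SelfAttention_q
# lora_te2_encoder_block_0_layer_0_SelfAttention_q
# lora_te3_encoder_block_0_layer_0_SelfAttention_q
```

Since all three formatted keys map to the same `model_key`, a LoRA checkpoint using any of the three prefixes resolves to the same T5 weight; the loop recomputes `formatted` identically on each iteration, and only the prefix differs.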