From 6c2afbbbbdaea7610ba068d836fb649ccc4683b6 Mon Sep 17 00:00:00 2001
From: layerdiffusion <19834515+lllyasviel@users.noreply.github.com>
Date: Sat, 3 Aug 2024 14:37:36 -0700
Subject: [PATCH] Update forge_perturbed_attention.py

---
 .../scripts/forge_perturbed_attention.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/extensions-builtin/sd_forge_perturbed_attention/scripts/forge_perturbed_attention.py b/extensions-builtin/sd_forge_perturbed_attention/scripts/forge_perturbed_attention.py
index c506240a..9389e92b 100644
--- a/extensions-builtin/sd_forge_perturbed_attention/scripts/forge_perturbed_attention.py
+++ b/extensions-builtin/sd_forge_perturbed_attention/scripts/forge_perturbed_attention.py
@@ -1,8 +1,8 @@
 import gradio as gr
-import ldm_patched.modules.samplers
 
 from modules import scripts
 from backend.patcher.base import set_model_options_patch_replace
+from backend.sampling.sampling_function import calc_cond_uncond_batch
 
 
 class PerturbedAttentionGuidanceForForge(scripts.Script):
@@ -41,7 +41,7 @@ class PerturbedAttentionGuidanceForForge(scripts.Script):
             if scale == 0:
                 return denoised
 
-            degraded, _ = ldm_patched.modules.samplers.calc_cond_uncond_batch(model, cond, None, x, sigma, new_options)
+            degraded, _ = calc_cond_uncond_batch(model, cond, None, x, sigma, new_options)
 
             return denoised + (cond_denoised - degraded) * scale
 