From 475524496d599e08bed036dc36025a4b9292c668 Mon Sep 17 00:00:00 2001
From: layerdiffusion <19834515+lllyasviel@users.noreply.github.com>
Date: Mon, 19 Aug 2024 18:54:54 -0700
Subject: [PATCH] revise

---
 backend/memory_management.py          | 2 +-
 backend/sampling/sampling_function.py | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/backend/memory_management.py b/backend/memory_management.py
index 03b4eff2..5e1c1401 100644
--- a/backend/memory_management.py
+++ b/backend/memory_management.py
@@ -929,7 +929,7 @@ def get_free_memory(dev=None, torch_free_too=False):
             mem_free_torch = mem_free_total
     else:
         if directml_enabled:
-            mem_free_total = 1024 * 1024 * 1024 * 2
+            mem_free_total = 1024 * 1024 * 1024
             mem_free_torch = mem_free_total
         elif is_intel_xpu():
             stats = torch.xpu.memory_stats(dev)
diff --git a/backend/sampling/sampling_function.py b/backend/sampling/sampling_function.py
index 0c7ffd24..068177e3 100644
--- a/backend/sampling/sampling_function.py
+++ b/backend/sampling/sampling_function.py
@@ -189,7 +189,7 @@ def calc_cond_uncond_batch(model, cond, uncond, x_in, timestep, model_options):
 
     free_memory = memory_management.get_free_memory(x_in.device)
 
-    if not args.disable_gpu_warning:
+    if (not args.disable_gpu_warning) and x_in.device.type == 'cuda':
         free_memory_mb = free_memory / (1024.0 * 1024.0)
         safe_memory_mb = 1536.0
         if free_memory_mb < safe_memory_mb:
@@ -197,8 +197,8 @@ def calc_cond_uncond_batch(model, cond, uncond, x_in, timestep, model_options):
             print(f"[Low GPU VRAM Warning] Your current GPU free memory is {free_memory_mb:.2f} MB for this diffusion iteration.")
             print(f"[Low GPU VRAM Warning] This number is lower than the safe value of {safe_memory_mb:.2f} MB.")
             print(f"[Low GPU VRAM Warning] If you continue the diffusion process, you may cause NVIDIA GPU degradation, and the speed may be extremely slow (about 10x slower).")
-            print(f"[Low GPU VRAM Warning] To solve the problem, you can set the 'GPU Weight' (on the top of page) to a lower value.")
-            print(f"[Low GPU VRAM Warning] If you cannot find 'GPU Weight', you can click the 'all' option in the 'UI' area on the left-top corner of the webpage.")
+            print(f"[Low GPU VRAM Warning] To solve the problem, you can set the 'GPU Weights' (on the top of page) to a lower value.")
+            print(f"[Low GPU VRAM Warning] If you cannot find 'GPU Weights', you can click the 'all' option in the 'UI' area on the left-top corner of the webpage.")
             print(f"[Low GPU VRAM Warning] If you want to take the risk of NVIDIA GPU fallback and test the 10x slower speed, you can (but are highly not recommended to) add '--disable-gpu-warning' to CMD flags to remove this warning.")
             print(f"----------------------\n\n")