Add a way to empty the CUDA cache on the fly

This commit is contained in:
layerdiffusion
2024-08-22 10:06:39 -07:00
parent 64b5ce49d1
commit 14eac6f2cf
3 changed files with 14 additions and 2 deletions

View File

@@ -187,6 +187,9 @@ def calc_cond_uncond_batch(model, cond, uncond, x_in, timestep, model_options):
to_batch_temp.reverse()
to_batch = to_batch_temp[:1]
if memory_management.signal_empty_cache:
memory_management.soft_empty_cache()
free_memory = memory_management.get_free_memory(x_in.device)
if (not args.disable_gpu_warning) and x_in.device.type == 'cuda':