Added an option to cache an empty prompt or the trigger word and unload the text encoders while training

Jaret Burkett
2024-09-21 20:54:09 -06:00
parent f85ad452c6
commit 2776221497
5 changed files with 74 additions and 1 deletion


@@ -377,6 +377,10 @@ class TrainConfig:
         self.linear_timesteps2 = kwargs.get('linear_timesteps2', False)
         self.disable_sampling = kwargs.get('disable_sampling', False)
+        # will cache a blank prompt or the trigger word, and unload the text encoder to cpu
+        # will make training faster and use less vram
+        self.unload_text_encoder = kwargs.get('unload_text_encoder', False)
+
 
 class ModelConfig:
     def __init__(self, **kwargs):

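For context, a minimal usage sketch of the new flag (the import path is an assumption based on the class shown in this hunk; TrainConfig reads every option from kwargs, so the flag defaults to False for existing configs):

# Hypothetical usage sketch; the import path is assumed, not shown in this diff.
from toolkit.config_modules import TrainConfig

train_config = TrainConfig(unload_text_encoder=True)
assert train_config.unload_text_encoder is True  # defaults to False when omitted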

@@ -40,6 +40,7 @@ def get_train_sd_device_state_preset(
     train_adapter: bool = False,
     train_embedding: bool = False,
     train_refiner: bool = False,
+    unload_text_encoder: bool = False,
 ):
     preset = copy.deepcopy(empty_preset)
     if not cached_latents:
@@ -88,4 +89,9 @@ def get_train_sd_device_state_preset(
         preset['unet']['device'] = device
         preset['text_encoder']['device'] = device
 
+    if unload_text_encoder:
+        preset['text_encoder']['training'] = False
+        preset['text_encoder']['requires_grad'] = False
+        preset['text_encoder']['device'] = 'cpu'
+
     return preset

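Because the unload_text_encoder check runs after every other branch in the function, it overrides whatever device the earlier logic assigned to the text encoder. A hedged sketch of the resulting preset (the device argument is an assumption inferred from the hunk body; only unload_text_encoder is confirmed by this diff):

# Sketch: the override is applied last, so it wins over earlier assignments.
preset = get_train_sd_device_state_preset(
    device='cuda:0',            # assumed parameter, implied by preset['unet']['device'] = device
    unload_text_encoder=True,
)
assert preset['text_encoder']['training'] is False
assert preset['text_encoder']['requires_grad'] is False
assert preset['text_encoder']['device'] == 'cpu'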

@@ -2635,3 +2635,10 @@ class StableDiffusion:
         }
         self.set_device_state(state)
+
+    def text_encoder_to(self, *args, **kwargs):
+        if isinstance(self.text_encoder, list):
+            for encoder in self.text_encoder:
+                encoder.to(*args, **kwargs)
+        else:
+            self.text_encoder.to(*args, **kwargs)
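
The helper exists because some models hold a single text encoder module while others (e.g. SDXL) keep a list of two, so callers can move either shape with one call. A usage sketch (sd is a stand-in for a StableDiffusion instance, and the empty_cache call is an assumption about reclaiming the freed VRAM):

import torch

# Hypothetical: offload the text encoder(s) once the prompt embeds are cached.
sd.text_encoder_to('cpu')   # works for one encoder or a list of encoders
torch.cuda.empty_cache()    # assumed follow-up to reclaim the freed VRAM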