Added Differential Guidance training target

Jaret Burkett
2025-11-10 09:38:25 -07:00
parent 9b89bab8fe
commit 2e7b2d9926
9 changed files with 101 additions and 6 deletions

View File

@@ -545,7 +545,10 @@ class TrainConfig:
         self.unconditional_prompt: str = kwargs.get('unconditional_prompt', '')
         if isinstance(self.guidance_loss_target, tuple):
             self.guidance_loss_target = list(self.guidance_loss_target)
+        self.do_differential_guidance = kwargs.get('do_differential_guidance', False)
+        self.differential_guidance_scale = kwargs.get('differential_guidance_scale', 3.0)
         # for multi stage models, how often to switch the boundary
         self.switch_boundary_every: int = kwargs.get('switch_boundary_every', 1)
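
The hunk above only registers the two new TrainConfig options; the loss-side code that consumes them lives in one of the other changed files and is not shown in this excerpt. As a rough sketch of how a differential-guidance training target could be formed from a conditional and an unconditional prediction (the helper name, tensor names, and formula below are assumptions for illustration, not the commit's implementation):

```python
import torch

def differential_guidance_target(
    target: torch.Tensor,        # the usual training target (e.g. noise or velocity)
    cond_pred: torch.Tensor,     # model output for the conditional prompt
    uncond_pred: torch.Tensor,   # model output for the unconditional_prompt
    scale: float = 3.0,          # differential_guidance_scale
) -> torch.Tensor:
    # CFG-style shift: push the target along the (cond - uncond) direction.
    # Detach so the guidance direction itself is not back-propagated through.
    diff = (cond_pred - uncond_pred).detach()
    return target + (scale - 1.0) * diff

# hypothetical usage inside a training step, given a TrainConfig `config`:
# if config.do_differential_guidance:
#     target = differential_guidance_target(
#         target, cond_pred, uncond_pred, scale=config.differential_guidance_scale
#     )
```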

View File

@@ -2907,7 +2907,7 @@ class StableDiffusion:
             try:
                 te_has_grad = encoder.text_model.final_layer_norm.weight.requires_grad
             except:
-                te_has_grad = encoder.encoder.block[0].layer[0].SelfAttention.q.weight.requires_grad
+                te_has_grad = False
             self.device_state['text_encoder'].append({
                 'training': encoder.training,
                 'device': encoder.device,
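
The changed fallback drops the old T5-specific attribute probe: if the encoder has no `text_model.final_layer_norm` (for example a T5-style text encoder), it is now simply recorded as having no trainable weights for device-state bookkeeping. A hypothetical, architecture-agnostic alternative would scan the module's parameters instead of hard-coding attribute paths (a sketch, not the commit's code):

```python
import torch.nn as nn

def encoder_has_trainable_params(encoder: nn.Module) -> bool:
    # True if any parameter of the text encoder requires gradients,
    # regardless of whether it is a CLIP- or T5-style architecture.
    return any(p.requires_grad for p in encoder.parameters())
```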