Added the ability to apply prompt attention masking for Flux models

This commit is contained in:
Jaret Burkett
2024-09-02 17:29:36 -06:00
parent d44d4eb61a
commit e5fadddd45
3 changed files with 9 additions and 7 deletions

View File

@@ -417,6 +417,9 @@ class ModelConfig:
# only for flux for now
self.quantize = kwargs.get("quantize", False)
self.low_vram = kwargs.get("low_vram", False)
self.attn_masking = kwargs.get("attn_masking", False)
if self.attn_masking and not self.is_flux:
raise ValueError("attn_masking is only supported with flux models currently")
pass