Shrink text embeds to the max token length for LTX-2. Drastically reduces cached text embedding sizes.

This commit is contained in:
Jaret Burkett
2026-01-28 12:54:49 -07:00
parent ea912d2d7b
commit 1ce2428722
7 changed files with 130 additions and 27 deletions

View File

@@ -229,6 +229,10 @@ class StableDiffusion:
# Whether to emit LoKr weights in the old state-dict format.
# NOTE(review): the original comment said "use new lokr format (default false
# for old models for backwards compatibility)", which contradicts the flag
# name and the value set here (`use_old_lokr_format = True`) — confirm which
# format old models actually default to before relying on this.
self.use_old_lokr_format = True
# Which side to pad text-encoder token sequences on ("right" or "left")
# when padding inputs so a batch has a uniform length.
# Some LLM-based text encoders require left-side padding, others right-side,
# so subclasses/configs may override this default.
self.te_padding_side = "right"
# properties for old arch for backwards compatibility
@property
def is_xl(self):