Fix issue that prevented ramtorch layer offloading with z_image

Jaret Burkett
2025-12-02 16:14:34 -07:00
parent d42f5af2fc
commit e6c5aead3b
2 changed files with 10 additions and 0 deletions


@@ -1308,6 +1308,11 @@ def validate_configs(
     if train_config.bypass_guidance_embedding and train_config.do_guidance_loss:
         raise ValueError("Cannot bypass guidance embedding and do guidance loss at the same time. "
                          "Please set bypass_guidance_embedding to False or do_guidance_loss to False.")
+    if model_config.accuracy_recovery_adapter is not None:
+        if model_config.assistant_lora_path is not None:
+            raise ValueError("Cannot use accuracy recovery adapter and assistant lora at the same time. "
+                             "Please set one of them to None.")
     # see if any datasets are caching text embeddings
     is_caching_text_embeddings = any(dataset.cache_text_embeddings for dataset in dataset_configs)
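The added lines guard against enabling an accuracy recovery adapter and an assistant LoRA in the same run. Below is a minimal, self-contained sketch of that mutual-exclusion pattern; it uses types.SimpleNamespace as a hypothetical stand-in for the project's model config object and made-up file paths, not the repository's actual validate_configs signature.

    # Sketch of the mutual-exclusion check introduced in the hunk above.
    # SimpleNamespace and the paths are illustrative stand-ins only.
    from types import SimpleNamespace


    def check_adapter_exclusivity(model_config):
        # Mirror of the new guard: an accuracy recovery adapter and an
        # assistant LoRA cannot both be set for the same run.
        if model_config.accuracy_recovery_adapter is not None:
            if model_config.assistant_lora_path is not None:
                raise ValueError(
                    "Cannot use accuracy recovery adapter and assistant lora at the same time. "
                    "Please set one of them to None."
                )


    # Usage: setting both options at once should raise ValueError.
    model_config = SimpleNamespace(
        accuracy_recovery_adapter="adapters/example_ara.safetensors",  # hypothetical path
        assistant_lora_path="loras/example_assistant.safetensors",     # hypothetical path
    )
    try:
        check_adapter_exclusivity(model_config)
    except ValueError as e:
        print(e)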