Offload the ARA together with the layer when layer offloading is enabled, and add support for offloading the LoRA. Optimizer support is still needed.

This commit is contained in:
Jaret Burkett
2025-10-21 06:03:27 -06:00
parent 76ce757e0c
commit 0d8a33dc16
5 changed files with 37 additions and 0 deletions

View File

@@ -208,6 +208,9 @@ class NetworkConfig:
# for multi stage models
self.split_multistage_loras = kwargs.get('split_multistage_loras', True)
# ramtorch, doesn't work yet
self.layer_offloading = kwargs.get('layer_offloading', False)
AdapterTypes = Literal['t2i', 'ip', 'ip+', 'clip', 'ilora', 'photo_maker', 'control_net', 'control_lora', 'i2v']