mirror of
https://github.com/ostris/ai-toolkit.git
synced 2026-03-13 22:49:48 +00:00
Bugfixes. Added small augmentations to dataloader. Will switch to albumentations soon though. Added ability to adjust the step count on start, overriding what is in the file
This commit is contained in:
@@ -392,7 +392,7 @@ class BaseSDTrainProcess(BaseTrainProcess):
|
||||
def load_training_state_from_metadata(self, path):
|
||||
meta = load_metadata_from_safetensors(path)
|
||||
# if 'training_info' in Orderdict keys
|
||||
if 'training_info' in meta and 'step' in meta['training_info']:
|
||||
if 'training_info' in meta and 'step' in meta['training_info'] and self.train_config.start_step is None:
|
||||
self.step_num = meta['training_info']['step']
|
||||
self.start_step = self.step_num
|
||||
print(f"Found step {self.step_num} in metadata, starting from there")
|
||||
@@ -796,6 +796,10 @@ class BaseSDTrainProcess(BaseTrainProcess):
|
||||
else:
|
||||
self.params.append(param)
|
||||
|
||||
if self.train_config.start_step is not None:
|
||||
self.step_num = self.train_config.start_step
|
||||
self.start_step = self.step_num
|
||||
|
||||
optimizer_type = self.train_config.optimizer.lower()
|
||||
optimizer = get_optimizer(self.params, optimizer_type, learning_rate=self.train_config.lr,
|
||||
optimizer_params=self.train_config.optimizer_params)
|
||||
|
||||
Reference in New Issue
Block a user