Mirror of https://github.com/ostris/ai-toolkit.git, synced 2026-02-21 12:53:56 +00:00
Fixed an issue with the embed file name; save the whole job config to the output dir instead of just the process config so it can be easily shared. Only write one config file, with no timestamp in the name.
@@ -284,8 +284,8 @@ class BaseSDTrainProcess(BaseTrainProcess):
             # change filename to pt if that is set
             if self.embed_config.save_format == "pt":
                 # replace extension
-                file_path = os.path.splitext(emb_file_path)[0] + ".pt"
-            self.embedding.save(file_path)
+                emb_file_path = os.path.splitext(emb_file_path)[0] + ".pt"
+            self.embedding.save(emb_file_path)
         else:
             self.sd.save(
                 file_path,
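For context, a minimal standalone sketch of the extension swap and the name fix above; the path and the save_format variable are hypothetical stand-ins for self.embed_config.save_format and the real embedding output path:

import os

save_format = "pt"                                   # stand-in for self.embed_config.save_format
emb_file_path = "output/my_embedding.safetensors"    # hypothetical embedding output path

if save_format == "pt":
    # replace the extension, keeping the same base name
    emb_file_path = os.path.splitext(emb_file_path)[0] + ".pt"

# before the fix, the save call still referenced the old file_path name,
# so the ".pt" rename never reached the file that was actually written
print(emb_file_path)  # output/my_embedding.pt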
@@ -30,7 +30,8 @@ class BaseTrainProcess(BaseProcess):
|
||||
|
||||
self.progress_bar = None
|
||||
self.writer = None
|
||||
self.training_folder = self.get_conf('training_folder', self.job.training_folder if hasattr(self.job, 'training_folder') else None)
|
||||
self.training_folder = self.get_conf('training_folder',
|
||||
self.job.training_folder if hasattr(self.job, 'training_folder') else None)
|
||||
self.save_root = os.path.join(self.training_folder, self.name)
|
||||
self.step = 0
|
||||
self.first_step = 0
|
||||
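The hunk above only re-wraps the line, but the pattern it wraps is a config lookup with a fallback to the parent job's attribute. A minimal sketch of that pattern, assuming get_conf(key, default) returns the configured value or the default; the dict-backed get_conf, the Job class, and the folder names below are hypothetical simplifications:

import os

class Job:
    training_folder = "output"  # hypothetical job-level default

class Process:
    def __init__(self, job, config):
        self.job = job
        self.config = config
        self.name = config.get("name", "my_process")
        # fall back to the parent job's training_folder when the process config has none
        self.training_folder = self.get_conf(
            "training_folder",
            self.job.training_folder if hasattr(self.job, "training_folder") else None)
        self.save_root = os.path.join(self.training_folder, self.name)

    def get_conf(self, key, default=None):
        # simplified stand-in for the real get_conf helper
        return self.config.get(key, default)

p = Process(Job(), {"name": "demo"})
print(p.save_root)  # "output/demo" on POSIX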
@@ -62,8 +63,7 @@ class BaseTrainProcess(BaseProcess):
         self.writer = SummaryWriter(summary_dir)
 
     def save_training_config(self):
-        timestamp = datetime.now().strftime('%Y%m%d-%H%M%S')
         os.makedirs(self.save_root, exist_ok=True)
-        save_dif = os.path.join(self.save_root, f'process_config_{timestamp}.yaml')
+        save_dif = os.path.join(self.save_root, f'config.yaml')
         with open(save_dif, 'w') as f:
-            yaml.dump(self.raw_process_config, f)
+            yaml.dump(self.job.raw_config, f)
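A minimal sketch of what the reworked save_training_config now does, assuming self.job.raw_config is the full parsed job config as a plain dict; the sample config dict and save_root path below are hypothetical:

import os
import yaml

save_root = "output/my_process"   # hypothetical save root
raw_config = {                    # hypothetical stand-in for self.job.raw_config
    "job": "train",
    "config": {"name": "my_process", "process": [{"type": "sd_trainer"}]},
}

os.makedirs(save_root, exist_ok=True)
# one fixed filename per run: rerunning overwrites config.yaml instead of
# accumulating timestamped process_config_*.yaml files
save_path = os.path.join(save_root, "config.yaml")
with open(save_path, "w") as f:
    yaml.dump(raw_config, f)

Because the whole job config is written rather than just the process block, the saved config.yaml is self-contained, which is what makes it easy to share, as the commit message notes.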