Initial work on an auto-adjusting optimizer

This commit is contained in:
Jaret Burkett
2024-11-29 04:48:58 -07:00
parent 67c2e44edb
commit cbe31eaf0a
2 changed files with 299 additions and 0 deletions

View File

@@ -95,6 +95,9 @@ def get_optimizer(
if 'warmup_init' not in optimizer_params:
optimizer_params['warmup_init'] = False
optimizer = Adafactor(params, lr=float(learning_rate), eps=1e-6, **optimizer_params)
elif lower_type == 'automagic':
from toolkit.optimizers.automagic import Automagic
optimizer = Automagic(params, lr=float(learning_rate), **optimizer_params)
else:
raise ValueError(f'Unknown optimizer type {optimizer_type}')
return optimizer