From 8c129778910019fbcaaff5f7eeca6db7b884bf53 Mon Sep 17 00:00:00 2001
From: Jaret Burkett
Date: Sun, 26 Oct 2025 05:47:25 -0600
Subject: [PATCH] Fixed adafactor eps

---
 toolkit/optimizer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/toolkit/optimizer.py b/toolkit/optimizer.py
index 75d1442d..355512e9 100644
--- a/toolkit/optimizer.py
+++ b/toolkit/optimizer.py
@@ -93,7 +93,7 @@ def get_optimizer(
             optimizer_params['scale_parameter'] = False
         if 'warmup_init' not in optimizer_params:
             optimizer_params['warmup_init'] = False
-        optimizer = Adafactor(params, lr=float(learning_rate), eps=1e-6, **optimizer_params)
+        optimizer = Adafactor(params, lr=float(learning_rate), **optimizer_params)
     elif lower_type == 'automagic':
         from toolkit.optimizers.automagic import Automagic
         optimizer = Automagic(params, lr=float(learning_rate), **optimizer_params)
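
Note on the change (an assumption, since the commit message only says "Fixed adafactor eps"): if the Adafactor used here is the transformers implementation, its eps argument is a 2-tuple of regularization constants (default (1e-30, 1e-3)), so a hard-coded float eps=1e-6 does not match the expected form. Dropping it lets the library default apply, or lets callers pass their own value through optimizer_params. The sketch below shows the call site after this patch under that assumption; the tiny model and learning rate are hypothetical stand-ins.

    import torch
    from transformers.optimization import Adafactor

    # hypothetical tiny model, just to have parameters to optimize
    model = torch.nn.Linear(4, 4)
    learning_rate = 1e-4  # hypothetical

    optimizer_params = {
        'relative_step': False,
        'scale_parameter': False,
        'warmup_init': False,
        # 'eps': (1e-30, 1e-3),  # optional explicit override, passed via optimizer_params
    }

    # matches the patched line: no hard-coded eps, defaults or overrides apply
    optimizer = Adafactor(model.parameters(), lr=float(learning_rate), **optimizer_params)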