Work on slider rework

This commit is contained in:
Jaret Burkett
2023-07-28 18:11:10 -06:00
parent 5fc2bb5d9c
commit 1e50b39442
3 changed files with 187 additions and 134 deletions


@@ -27,6 +27,17 @@ def get_optimizer(
optimizer = dadaptation.DAdaptAdam(params, lr=use_lr, **optimizer_params)
# warn user that dadaptation is deprecated
print("WARNING: Dadaptation optimizer type has been changed to DadaptationAdam. Please update your config.")
elif lower_type.startswith("prodigy"):
from prodigyopt import Prodigy
print("Using Prodigy optimizer")
use_lr = learning_rate
if use_lr < 0.1:
# prodigy adapts its own step size and expects lr values of roughly 0.1 to 1.0; default to 1.0
use_lr = 1.0
# weight decay (0 by default) and any other Prodigy settings can be passed through optimizer_params
optimizer = Prodigy(params, lr=use_lr, **optimizer_params)
elif lower_type.endswith("8bit"):
import bitsandbytes
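
For context, the Prodigy branch above constructs a prodigyopt.Prodigy optimizer with an effective learning rate of 1.0. The sketch below is illustrative and not part of this commit: the toy model and training loop are assumptions for demonstration, and only the Prodigy(params, lr=1.0) construction mirrors the diff.

    # Illustrative sketch (not part of this commit): driving the Prodigy
    # optimizer that the branch above builds. Model and loop are assumptions.
    import torch
    from prodigyopt import Prodigy

    model = torch.nn.Linear(16, 1)   # hypothetical stand-in for the network being trained
    params = model.parameters()

    # Mirrors the diff: Prodigy adapts its own step size, so lr defaults to 1.0
    optimizer = Prodigy(params, lr=1.0, weight_decay=0.0)

    for _ in range(10):
        x = torch.randn(8, 16)
        loss = model(x).pow(2).mean()  # dummy objective
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()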