Skip to content

Commit

Permalink
update: AdamG optimizer
Browse files Browse the repository at this point in the history
  • Loading branch information
kozistr committed Aug 13, 2024
1 parent 35dcf77 commit d728f9e
Show file tree
Hide file tree
Showing 3 changed files with 6 additions and 1 deletion.
2 changes: 2 additions & 0 deletions pytorch_optimizer/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@
from pytorch_optimizer.optimizer.adalite import Adalite
from pytorch_optimizer.optimizer.adam_mini import AdamMini
from pytorch_optimizer.optimizer.adamax import AdaMax
from pytorch_optimizer.optimizer.adamg import AdamG
from pytorch_optimizer.optimizer.adamod import AdaMod
from pytorch_optimizer.optimizer.adamp import AdamP
from pytorch_optimizer.optimizer.adams import AdamS
Expand Down Expand Up @@ -206,6 +207,7 @@
StableAdamW,
AdamMini,
AdaLOMO,
AdamG,
]
OPTIMIZERS: Dict[str, OPTIMIZER] = {str(optimizer.__name__).lower(): optimizer for optimizer in OPTIMIZER_LIST}

Expand Down
3 changes: 3 additions & 0 deletions tests/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
Adai,
Adalite,
AdaMax,
AdamG,
AdaMod,
AdamP,
AdamS,
Expand Down Expand Up @@ -136,6 +137,7 @@
'grokfastadamw',
'stableadamw',
'adammini',
'adamg',
]

VALID_LR_SCHEDULER_NAMES: List[str] = [
Expand Down Expand Up @@ -468,6 +470,7 @@
(GrokFastAdamW, {'lr': 1e0, 'weight_decay': 1e-3}, 10),
(Kate, {'lr': 5e-2}, 10),
(StableAdamW, {'lr': 1e0}, 5),
(AdamG, {'lr': 1e0}, 20),
]
ADANORM_SUPPORTED_OPTIMIZERS: List[Tuple[Any, Dict[str, Union[float, bool, int]], int]] = [
(AdaBelief, {'lr': 5e-1, 'weight_decay': 1e-3, 'adanorm': True}, 10),
Expand Down
2 changes: 1 addition & 1 deletion tests/test_load_modules.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ def test_load_lr_scheduler_invalid(invalid_lr_scheduler_names):


def test_get_supported_optimizers():
    """Registry sanity check: the optimizer list has exactly 74 entries after adding AdamG.

    NOTE: this count must be bumped whenever an optimizer is added to or
    removed from OPTIMIZER_LIST in pytorch_optimizer/__init__.py.
    """
    # The diff left both the stale `== 73` and the new `== 74` assertions in
    # view; only the post-commit value (74) is kept — two contradictory
    # asserts on the same call would otherwise guarantee a failure.
    assert len(get_supported_optimizers()) == 74


def test_get_supported_lr_schedulers():
Expand Down

0 comments on commit d728f9e

Please sign in to comment.