Skip to content

Commit

Permalink
Add optimizer parameter to PolyScheduler constructor. (#295)
Browse files Browse the repository at this point in the history
  • Loading branch information
tanganke authored Nov 27, 2024
1 parent 1524b89 commit 6610d2f
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions pytorch_optimizer/lr_scheduler/linear_warmup.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,13 +28,13 @@ class PolyScheduler(BaseLinearWarmupScheduler):
:param poly_order: float. lr scheduler decreases with steps.
"""

def __init__(self, optimizer, poly_order: float = 0.5, **kwargs):
    """Initialize the polynomial warmup scheduler.

    :param optimizer: Optimizer. optimizer whose learning rate is scheduled;
        forwarded to the base warmup scheduler.
    :param poly_order: float. order of the polynomial decay curve; must be positive.
    :param kwargs: dict. extra keyword arguments passed through to the base
        scheduler (warmup settings, min/max lr, etc. — see the base class).
    :raises ValueError: when ``poly_order`` is not positive.
    """
    # Validate before touching any state so a bad argument leaves the
    # instance untouched.
    if poly_order <= 0:
        raise ValueError(f'[-] poly_order must be positive. {poly_order}')

    self.poly_order = poly_order

    super().__init__(optimizer, **kwargs)

def _step(self) -> float:
    """Return the learning rate for the current post-warmup step.

    The lr decays polynomially (order ``poly_order``) from ``max_lr``
    toward ``min_lr`` as ``step_t`` advances past ``warmup_steps``.
    """
    steps_after_warmup = self.step_t - self.warmup_steps
    decay = steps_after_warmup ** self.poly_order
    lr_range = self.max_lr - self.min_lr
    return self.min_lr + lr_range * decay

0 comments on commit 6610d2f

Please sign in to comment.