Skip to content

Commit

Permalink
Fix PyTorch 2.0 breakage for Lookahead optimizer adapter
Browse files Browse the repository at this point in the history
  • Loading branch information
rwightman committed Jun 2, 2023
1 parent cd950e6 commit 700aebc
Showing 1 changed file with 5 additions and 0 deletions.
5 changes: 5 additions & 0 deletions timm/optim/lookahead.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,9 @@
Hacked together by / Copyright 2020 Ross Wightman
"""
from collections import OrderedDict
from typing import Callable, Dict

import torch
from torch.optim.optimizer import Optimizer
from collections import defaultdict
Expand All @@ -12,6 +15,8 @@
class Lookahead(Optimizer):
def __init__(self, base_optimizer, alpha=0.5, k=6):
# NOTE super().__init__() not called on purpose
self._optimizer_step_pre_hooks: Dict[int, Callable] = OrderedDict()
self._optimizer_step_post_hooks: Dict[int, Callable] = OrderedDict()
if not 0.0 <= alpha <= 1.0:
raise ValueError(f'Invalid slow update rate: {alpha}')
if not 1 <= k:
Expand Down

0 comments on commit 700aebc

Please sign in to comment.