diff --git a/src/refiners/training_utils/config.py b/src/refiners/training_utils/config.py
index 7b5941541..57921e2ea 100644
--- a/src/refiners/training_utils/config.py
+++ b/src/refiners/training_utils/config.py
@@ -1,5 +1,5 @@
 from enum import Enum
-from logging import warn
+from logging import warning
 from pathlib import Path
 from typing import Annotated, Any, Callable, Iterable, Literal, Type, TypeVar
 
@@ -132,7 +132,7 @@ def get(self, params: ParamsT) -> Optimizer:
                 )
             case Optimizers.Prodigy:
                 if self.learning_rate != 1.0:
-                    warn("Prodigy learning rate is not 1.0, this might cause instability.")
+                    warning("Prodigy learning rate is not 1.0, this might cause instability.")
             return Prodigy(
                 lr=self.learning_rate,
                 params=params,