[python] remove learning_rates argument of train() function (#4831)
StrikerRUS authored Nov 29, 2021
1 parent 2f5d898 commit b81f7dd
Showing 2 changed files with 4 additions and 14 deletions.
8 changes: 4 additions & 4 deletions examples/python-guide/advanced_example.py
@@ -112,15 +112,15 @@
 print('Finished 10 - 20 rounds with model file...')
 
 # decay learning rates
-# learning_rates accepts:
-# 1. list/tuple with length = num_boost_round
+# reset_parameter callback accepts:
+# 1. list with length = num_boost_round
 # 2. function(curr_iter)
 gbm = lgb.train(params,
                 lgb_train,
                 num_boost_round=10,
                 init_model=gbm,
-                learning_rates=lambda iter: 0.05 * (0.99 ** iter),
-                valid_sets=lgb_eval)
+                valid_sets=lgb_eval,
+                callbacks=[lgb.reset_parameter(learning_rate=lambda iter: 0.05 * (0.99 ** iter))])
 
 print('Finished 20 - 30 rounds with decay learning rates...')

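The updated comment points at the two forms lgb.reset_parameter() accepts for learning_rate: a list with one value per boosting round, or a function of the current iteration number. A minimal self-contained sketch of both forms; the toy data and params dict here are illustrative assumptions, not part of this commit:

import numpy as np
import lightgbm as lgb

# Illustrative toy data only; any regression Dataset works here.
rng = np.random.default_rng(0)
lgb_train = lgb.Dataset(rng.random((100, 5)), rng.random(100))
params = {'objective': 'regression', 'verbose': -1}

# Form 1: list with length = num_boost_round (one rate per round).
gbm = lgb.train(params, lgb_train, num_boost_round=5,
                callbacks=[lgb.reset_parameter(learning_rate=[0.05, 0.04, 0.03, 0.02, 0.01])])

# Form 2: function of the current iteration number.
gbm = lgb.train(params, lgb_train, num_boost_round=5,
                callbacks=[lgb.reset_parameter(learning_rate=lambda i: 0.05 * (0.99 ** i))])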
10 changes: 0 additions & 10 deletions python-package/lightgbm/engine.py
@@ -36,7 +36,6 @@ def train(
     early_stopping_rounds: Optional[int] = None,
     evals_result: Optional[Dict[str, Any]] = None,
     verbose_eval: Union[bool, int, str] = 'warn',
-    learning_rates: Optional[Union[List[float], Callable[[int], float]]] = None,
     keep_training_booster: bool = False,
     callbacks: Optional[List[Callable]] = None
 ) -> Booster:
@@ -145,10 +144,6 @@ def train(
         With ``verbose_eval`` = 4 and at least one item in ``valid_sets``,
         an evaluation metric is printed every 4 (instead of 1) boosting stages.
-    learning_rates : list, callable or None, optional (default=None)
-        List of learning rates for each boosting round
-        or a callable that calculates ``learning_rate``
-        in terms of current number of round (e.g. yields learning rate decay).
     keep_training_booster : bool, optional (default=False)
         Whether the returned Booster will be used to keep training.
         If False, the returned value will be converted into _InnerPredictor before returning.
@@ -251,11 +246,6 @@ def train(
     if early_stopping_rounds is not None and early_stopping_rounds > 0:
         callbacks.add(callback.early_stopping(early_stopping_rounds, first_metric_only, verbose=bool(verbose_eval)))
 
-    if learning_rates is not None:
-        _log_warning("'learning_rates' argument is deprecated and will be removed in a future release of LightGBM. "
-                     "Pass 'reset_parameter()' callback via 'callbacks' argument instead.")
-        callbacks.add(callback.reset_parameter(learning_rate=learning_rates))
-
     if evals_result is not None:
         _log_warning("'evals_result' argument is deprecated and will be removed in a future release of LightGBM. "
                      "Pass 'record_evaluation()' callback via 'callbacks' argument instead.")
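With the argument gone from the signature, passing learning_rates to train() now raises a plain TypeError instead of the deprecation warning removed above. The migration is mechanical; a before/after sketch, assuming params, lgb_train, and lgb_eval exist as in the example file:

# Before this commit (deprecated, emitted a warning):
# gbm = lgb.train(params, lgb_train, num_boost_round=10,
#                 valid_sets=lgb_eval,
#                 learning_rates=lambda i: 0.05 * (0.99 ** i))

# After this commit:
gbm = lgb.train(params, lgb_train, num_boost_round=10,
                valid_sets=lgb_eval,
                callbacks=[lgb.reset_parameter(learning_rate=lambda i: 0.05 * (0.99 ** i))])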
