From 846e8954ac64e9daee1ca9076a000b8f0334231b Mon Sep 17 00:00:00 2001
From: James Lamb
Date: Sun, 29 Aug 2021 02:01:02 +0100
Subject: [PATCH] [R-package] fix warnings in demos (#4569)

* [R-package] fix warnings in demos

* Apply suggestions from code review

Co-authored-by: Nikita Titov

* fix additional params issues in multiclass and categorical_feature examples

* Update R-package/demo/multiclass.R

Co-authored-by: Nikita Titov

Co-authored-by: Nikita Titov
---
 .gitignore                                   |  3 +-
 R-package/demo/basic_walkthrough.R           | 49 +++++++-------
 R-package/demo/categorical_features_rules.R  |  1 -
 R-package/demo/early_stopping.R              |  2 +-
 R-package/demo/leaf_stability.R              | 31 +++++++++----
 R-package/demo/multiclass.R                  | 24 ++++++----
 R-package/demo/multiclass_custom_objective.R | 28 ++++++-----
 R-package/demo/weight_param.R                | 12 ++---
 8 files changed, 78 insertions(+), 72 deletions(-)

diff --git a/.gitignore b/.gitignore
index 5a90094850b9..96e0700f4f49 100644
--- a/.gitignore
+++ b/.gitignore
@@ -326,7 +326,7 @@ coverage.xml
 .hypothesis/
 **/coverage.html
 **/coverage.html.zip
-R-package/tests/testthat/Rplots.pdf
+**/Rplots.pdf

 # Translations
 *.mo
@@ -427,6 +427,7 @@ miktex*.zip
 *.def

 # Files created by examples and tests
+*.buffer
 **/lgb-Dataset.data
 **/lgb.Dataset.data
 **/model.txt
diff --git a/R-package/demo/basic_walkthrough.R b/R-package/demo/basic_walkthrough.R
index 6716bb894840..3dc672e11d73 100644
--- a/R-package/demo/basic_walkthrough.R
+++ b/R-package/demo/basic_walkthrough.R
@@ -12,6 +12,14 @@ test <- agaricus.test
 class(train$label)
 class(train$data)

+# Set parameters for model training
+train_params <- list(
+    num_leaves = 4L
+    , learning_rate = 1.0
+    , objective = "binary"
+    , nthread = 2L
+)
+
 #--------------------Basic Training using lightgbm----------------
 # This is the basic usage of lightgbm: you can put a matrix in the data field
 # Note: we are putting in a sparse matrix here; lightgbm naturally handles sparse input
@@ -19,22 +27,18 @@ class(train$data)
 print("Training lightgbm with sparseMatrix")
 bst <- lightgbm(
     data = train$data
+    , params = train_params
     , label = train$label
-    , num_leaves = 4L
-    , learning_rate = 1.0
     , nrounds = 2L
-    , objective = "binary"
 )

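(An aside on the train_params pattern above, not part of the upstream patch: once the shared settings live in a single list, a variant run only needs to override the entries it changes. A minimal sketch, assuming the train_params list from the hunk above; slower_params and bst_slower are illustrative names.)

    # Override one entry of the shared list with base R's modifyList()
    slower_params <- modifyList(train_params, list(learning_rate = 0.1))
    bst_slower <- lightgbm(
        data = train$data
        , params = slower_params
        , label = train$label
        , nrounds = 2L
    )
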
 # Alternatively, you can put in a dense matrix, i.e. a basic R matrix
 print("Training lightgbm with Matrix")
 bst <- lightgbm(
     data = as.matrix(train$data)
+    , params = train_params
     , label = train$label
-    , num_leaves = 4L
-    , learning_rate = 1.0
     , nrounds = 2L
-    , objective = "binary"
 )

 # You can also put in an lgb.Dataset object, which stores label, data, and other metadata needed for advanced features
@@ -45,42 +49,32 @@ dtrain <- lgb.Dataset(
 )
 bst <- lightgbm(
     data = dtrain
-    , num_leaves = 4L
-    , learning_rate = 1.0
+    , params = train_params
     , nrounds = 2L
-    , objective = "binary"
 )

 # verbose = 0, 1, 2
 print("Train lightgbm with verbose 0, no message")
 bst <- lightgbm(
     data = dtrain
-    , num_leaves = 4L
-    , learning_rate = 1.0
+    , params = train_params
     , nrounds = 2L
-    , objective = "binary"
     , verbose = 0L
 )

 print("Train lightgbm with verbose 1, print evaluation metric")
 bst <- lightgbm(
     data = dtrain
-    , num_leaves = 4L
-    , learning_rate = 1.0
+    , params = train_params
     , nrounds = 2L
-    , nthread = 2L
-    , objective = "binary"
     , verbose = 1L
 )

 print("Train lightgbm with verbose 2, also print information about trees")
 bst <- lightgbm(
     data = dtrain
-    , num_leaves = 4L
-    , learning_rate = 1.0
+    , params = train_params
     , nrounds = 2L
-    , nthread = 2L
-    , objective = "binary"
     , verbose = 2L
 )

@@ -126,25 +120,19 @@ valids <- list(train = dtrain, test = dtest)
 print("Train lightgbm using lgb.train with valids")
 bst <- lgb.train(
     data = dtrain
-    , num_leaves = 4L
-    , learning_rate = 1.0
+    , params = train_params
     , nrounds = 2L
     , valids = valids
-    , nthread = 2L
-    , objective = "binary"
 )

 # We can change evaluation metrics, or use multiple evaluation metrics
 print("Train lightgbm using lgb.train with valids, watch logloss and error")
 bst <- lgb.train(
     data = dtrain
-    , num_leaves = 4L
-    , learning_rate = 1.0
+    , params = train_params
     , nrounds = 2L
     , valids = valids
     , eval = c("binary_error", "binary_logloss")
-    , nthread = 2L
-    , objective = "binary"
 )

 # lgb.Dataset can also be saved using lgb.Dataset.save
@@ -154,12 +142,9 @@ lgb.Dataset.save(dtrain, "dtrain.buffer")
 dtrain2 <- lgb.Dataset("dtrain.buffer")
 bst <- lgb.train(
     data = dtrain2
-    , num_leaves = 4L
-    , learning_rate = 1.0
+    , params = train_params
     , nrounds = 2L
     , valids = valids
-    , nthread = 2L
-    , objective = "binary"
 )

 # Information can be extracted from lgb.Dataset using getinfo
diff --git a/R-package/demo/categorical_features_rules.R b/R-package/demo/categorical_features_rules.R
index 68df09f24f3b..97af5a7896b5 100644
--- a/R-package/demo/categorical_features_rules.R
+++ b/R-package/demo/categorical_features_rules.R
@@ -85,7 +85,6 @@ params <- list(
     , metric = "l2"
     , min_data = 1L
     , learning_rate = 0.1
-    , min_data = 0L
     , min_hessian = 1.0
     , max_depth = 2L
 )
diff --git a/R-package/demo/early_stopping.R b/R-package/demo/early_stopping.R
index f68b82dc3dcb..fa8abce38b08 100644
--- a/R-package/demo/early_stopping.R
+++ b/R-package/demo/early_stopping.R
@@ -46,7 +46,7 @@ bst <- lgb.train(
     , dtrain
     , num_round
     , valids
-    , objective = logregobj
+    , obj = logregobj
     , eval = evalerror
     , early_stopping_round = 3L
 )
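(For context on the obj = logregobj fix just above, a hedged sketch rather than the demo's verbatim code: lgb.train()'s obj argument takes a function of the raw predictions and the training Dataset that returns the gradient and hessian of the loss. For binary log loss it looks roughly like this.)

    logregobj <- function(preds, dtrain) {
        labels <- getinfo(dtrain, "label")
        preds <- 1.0 / (1.0 + exp(-preds))    # sigmoid of the raw scores
        grad <- preds - labels                # first derivative of log loss
        hess <- preds * (1.0 - preds)         # second derivative
        return(list(grad = grad, hess = hess))
    }
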
diff --git a/R-package/demo/leaf_stability.R b/R-package/demo/leaf_stability.R
index bad2e83107b1..af1c533ac5b1 100644
--- a/R-package/demo/leaf_stability.R
+++ b/R-package/demo/leaf_stability.R
@@ -85,18 +85,21 @@ test <- agaricus.test
 dtest <- lgb.Dataset.create.valid(dtrain, test$data, label = test$label)

 # set up parameters and train a model
-params <- list(objective = "regression", metric = "l2")
+params <- list(
+    objective = "regression"
+    , metric = "l2"
+    , min_data = 1L
+    , learning_rate = 0.1
+    , bagging_fraction = 0.1
+    , bagging_freq = 1L
+    , bagging_seed = 1L
+)
 valids <- list(test = dtest)
 model <- lgb.train(
     params
     , dtrain
     , 50L
     , valids
-    , min_data = 1L
-    , learning_rate = 0.1
-    , bagging_fraction = 0.1
-    , bagging_freq = 1L
-    , bagging_seed = 1L
 )

 # We create a data.frame with the following structure:
@@ -141,13 +144,17 @@ table(new_data$binned)
 .depth_density_plot(df = new_data)

 # Now, let's try other parameters
+params <- list(
+    objective = "regression"
+    , metric = "l2"
+    , min_data = 1L
+    , learning_rate = 1.0
+)
 model2 <- lgb.train(
     params
     , dtrain
     , 100L
     , valids
-    , min_data = 1L
-    , learning_rate = 1.0
 )

 # We create the data structure, but for model2
@@ -193,13 +200,17 @@ table(new_data2$binned)
 .depth_density_plot(df = new_data2)

 # Now, try with very severe overfitting
+params <- list(
+    objective = "regression"
+    , metric = "l2"
+    , min_data = 1L
+    , learning_rate = 1.0
+)
 model3 <- lgb.train(
     params
     , dtrain
     , 1000L
     , valids
-    , min_data = 1L
-    , learning_rate = 1.0
 )

 # We create the data structure, but for model3
diff --git a/R-package/demo/multiclass.R b/R-package/demo/multiclass.R
index 00b49e83f6de..0f52fecc0b26 100644
--- a/R-package/demo/multiclass.R
+++ b/R-package/demo/multiclass.R
@@ -18,14 +18,18 @@ dtest <- lgb.Dataset.create.valid(dtrain, data = test[, 1L:4L], label = test[, 5
 valids <- list(test = dtest)

 # Method 1 of training
-params <- list(objective = "multiclass", metric = "multi_error", num_class = 3L)
+params <- list(
+    objective = "multiclass"
+    , metric = "multi_error"
+    , num_class = 3L
+    , min_data = 1L
+    , learning_rate = 1.0
+)
 model <- lgb.train(
     params
     , dtrain
     , 100L
     , valids
-    , min_data = 1L
-    , learning_rate = 1.0
     , early_stopping_rounds = 10L
 )

@@ -34,17 +38,19 @@ model <- lgb.train(
 my_preds <- predict(model, test[, 1L:4L])

 # Method 2 of training, identical
+params <- list(
+    min_data = 1L
+    , learning_rate = 1.0
+    , objective = "multiclass"
+    , metric = "multi_error"
+    , num_class = 3L
+)
 model <- lgb.train(
-    list()
+    params
     , dtrain
     , 100L
     , valids
-    , min_data = 1L
-    , learning_rate = 1.0
     , early_stopping_rounds = 10L
-    , objective = "multiclass"
-    , metric = "multi_error"
-    , num_class = 3L
 )

 # We can predict on test data, identical
diff --git a/R-package/demo/multiclass_custom_objective.R b/R-package/demo/multiclass_custom_objective.R
index ec2ed90cdf64..70d5c6ce3f90 100644
--- a/R-package/demo/multiclass_custom_objective.R
+++ b/R-package/demo/multiclass_custom_objective.R
@@ -20,18 +20,20 @@ valids <- list(train = dtrain, test = dtest)
 # Method 1 of training with built-in multiclass objective
 # Note: we need to turn off boost_from_average to match the custom objective
 # (https://github.com/microsoft/LightGBM/issues/1846)
+params <- list(
+    min_data = 1L
+    , learning_rate = 1.0
+    , num_class = 3L
+    , boost_from_average = FALSE
+    , metric = "multi_logloss"
+)
 model_builtin <- lgb.train(
-    list()
+    params
     , dtrain
-    , boost_from_average = FALSE
     , 100L
     , valids
-    , min_data = 1L
-    , learning_rate = 1.0
     , early_stopping_rounds = 10L
-    , objective = "multiclass"
-    , metric = "multi_logloss"
-    , num_class = 3L
+    , obj = "multiclass"
 )

 preds_builtin <- predict(model_builtin, test[, 1L:4L], rawscore = TRUE, reshape = TRUE)
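(The custom_multiclass_obj passed to lgb.train() below is defined earlier in this demo file; the diff only shows its call site. As a hedged sketch of the general shape of such a function — softmax over the raw scores, then a per-class gradient and a diagonal hessian approximation — it looks roughly like this.)

    custom_multiclass_obj <- function(preds, dtrain) {
        labels <- getinfo(dtrain, "label")
        # raw scores arrive as a single vector; reshape to n_obs x n_class
        preds <- matrix(preds, nrow = length(labels))
        preds <- preds - apply(preds, 1L, max)        # stabilize before exp()
        prob <- exp(preds) / rowSums(exp(preds))
        grad <- prob
        idx <- cbind(seq_along(labels), labels + 1L)  # labels are 0-based
        grad[idx] <- grad[idx] - 1.0
        hess <- 2.0 * prob * (1.0 - prob)             # common approximation
        return(list(grad = grad, hess = hess))
    }
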
@@ -92,17 +94,19 @@ custom_multiclass_metric <- function(preds, dtrain) {
   ))
 }

+params <- list(
+    min_data = 1L
+    , learning_rate = 1.0
+    , num_class = 3L
+)
 model_custom <- lgb.train(
-    list()
+    params
     , dtrain
     , 100L
     , valids
-    , min_data = 1L
-    , learning_rate = 1.0
     , early_stopping_rounds = 10L
-    , objective = custom_multiclass_obj
+    , obj = custom_multiclass_obj
     , eval = custom_multiclass_metric
-    , num_class = 3L
 )

 preds_custom <- predict(model_custom, test[, 1L:4L], rawscore = TRUE, reshape = TRUE)
diff --git a/R-package/demo/weight_param.R b/R-package/demo/weight_param.R
index 461b8caa79be..9702de41ece9 100644
--- a/R-package/demo/weight_param.R
+++ b/R-package/demo/weight_param.R
@@ -34,14 +34,14 @@ params <- list(
     , num_leaves = 7L
     , max_depth = 3L
     , nthread = 1L
+    , min_data = 1L
+    , learning_rate = 1.0
 )
 model <- lgb.train(
     params
     , dtrain
     , 50L
     , valids
-    , min_data = 1L
-    , learning_rate = 1.0
     , early_stopping_rounds = 10L
 )
 weight_loss <- as.numeric(model$record_evals$test$l2$eval)
@@ -58,14 +58,14 @@ params <- list(
     , num_leaves = 7L
     , max_depth = 3L
     , nthread = 1L
+    , min_data = 1L
+    , learning_rate = 1.0
 )
 model <- lgb.train(
     params
     , dtrain
     , 50L
     , valids
-    , min_data = 1L
-    , learning_rate = 1.0
     , early_stopping_rounds = 10L
 )
 small_weight_loss <- as.numeric(model$record_evals$test$l2$eval)
@@ -94,14 +94,14 @@ params <- list(
     , num_leaves = 7L
     , max_depth = 3L
     , nthread = 1L
+    , min_data = 1L
+    , learning_rate = 1.0
 )
 model <- lgb.train(
     params
     , dtrain
     , 50L
     , valids
-    , min_data = 1L
-    , learning_rate = 1.0
     , early_stopping_rounds = 10L
 )
 large_weight_loss <- as.numeric(model$record_evals$test$l2$eval)
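(A closing aside on weight_param.R, not part of the upstream patch: the per-row weights that these params interact with are attached to the lgb.Dataset itself. A minimal sketch under that assumption, with illustrative values; setinfo() is the accessor the R package of this era exposes for fields such as "label" and "weight".)

    # Give the first 100 training rows half the influence of the rest
    weights <- rep(1.0, length(train$label))
    weights[1L:100L] <- 0.5
    setinfo(dtrain, "weight", weights)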