
Commit

[R-package] [docs] Simplified examples to cut example run time (fixes microsoft#2988) (microsoft#2989)

* [R-package] [docs] Simplified examples to cut example run time (fixes microsoft#2988)

* updated learning rates
jameslamb authored Apr 15, 2020
1 parent 151bf07 commit 1816167
Showing 46 changed files with 132 additions and 124 deletions.
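
The same pattern repeats across the files below: drop the redundant library(lightgbm) call, lower nrounds, remove or shrink early_stopping_rounds, bump the toy learning_rate from 0.01 to 0.1 so fewer rounds suffice, and wrap the slower examples in \donttest{}. The following is a minimal sketch of the resulting example style, assembled from the lgb.cv hunk further down; the values mirror the diff, but it is illustrative rather than a verbatim copy of any single file.

```r
# Illustrative sketch of the post-change example style (values mirror the diff).
# library() is kept so this snippet runs standalone; the roxygen examples drop
# it because the package is already attached when examples are executed.
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
params <- list(objective = "regression", metric = "l2")
model <- lgb.cv(
  params = params
  , data = dtrain
  , nrounds = 5L        # was 10L; fewer rounds keep the example fast
  , nfold = 3L
  , min_data = 1L
  , learning_rate = 1.0
  # early_stopping_rounds dropped: unnecessary for a short demo
)
```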
21 changes: 10 additions & 11 deletions R-package/R/lgb.Booster.R
@@ -711,7 +711,6 @@ Booster <- R6::R6Class(
#' number of columns corresponding to the number of trees.
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -723,11 +722,10 @@ Booster <- R6::R6Class(
#' model <- lgb.train(
#' params = params
#' , data = dtrain
#' , nrounds = 10L
#' , nrounds = 5L
#' , valids = valids
#' , min_data = 1L
#' , learning_rate = 1.0
#' , early_stopping_rounds = 5L
#' )
#' preds <- predict(model, test$data)
#' @export
@@ -769,7 +767,7 @@ predict.lgb.Booster <- function(object,
#' @return lgb.Booster
#'
#' @examples
#' library(lightgbm)
#' \donttest{
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -781,17 +779,17 @@ predict.lgb.Booster <- function(object,
#' model <- lgb.train(
#' params = params
#' , data = dtrain
#' , nrounds = 10L
#' , nrounds = 5L
#' , valids = valids
#' , min_data = 1L
#' , learning_rate = 1.0
#' , early_stopping_rounds = 5L
#' , early_stopping_rounds = 3L
#' )
#' lgb.save(model, "model.txt")
#' load_booster <- lgb.load(filename = "model.txt")
#' model_string <- model$save_model_to_string(NULL) # saves best iteration
#' load_booster_from_str <- lgb.load(model_str = model_string)
#'
#' }
#' @export
lgb.load <- function(filename = NULL, model_str = NULL) {

@@ -828,6 +826,7 @@ lgb.load <- function(filename = NULL, model_str = NULL) {
#' @return lgb.Booster
#'
#' @examples
#' \donttest{
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
@@ -847,6 +846,7 @@ lgb.load <- function(filename = NULL, model_str = NULL) {
#' , early_stopping_rounds = 5L
#' )
#' lgb.save(model, "model.txt")
#' }
#' @export
lgb.save <- function(booster, filename, num_iteration = NULL) {

@@ -874,6 +874,7 @@ lgb.save <- function(booster, filename, num_iteration = NULL) {
#' @return json format of model
#'
#' @examples
#' \donttest{
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
@@ -893,7 +894,7 @@ lgb.save <- function(booster, filename, num_iteration = NULL) {
#' , early_stopping_rounds = 5L
#' )
#' json_model <- lgb.dump(model)
#'
#' }
#' @export
lgb.dump <- function(booster, num_iteration = NULL) {

@@ -922,7 +923,6 @@ lgb.dump <- function(booster, num_iteration = NULL) {
#'
#' @examples
#' # train a regression model
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -934,11 +934,10 @@ lgb.dump <- function(booster, num_iteration = NULL) {
#' model <- lgb.train(
#' params = params
#' , data = dtrain
#' , nrounds = 10L
#' , nrounds = 5L
#' , valids = valids
#' , min_data = 1L
#' , learning_rate = 1.0
#' , early_stopping_rounds = 5L
#' )
#'
#' # Examine valid data_name values
11 changes: 0 additions & 11 deletions R-package/R/lgb.Dataset.R
@@ -725,7 +725,6 @@ Dataset <- R6::R6Class(
#' @return constructed dataset
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -770,7 +769,6 @@ lgb.Dataset <- function(data,
#' @return constructed dataset
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -797,7 +795,6 @@ lgb.Dataset.create.valid <- function(dataset, data, info = list(), ...) {
#' @param dataset Object of class \code{lgb.Dataset}
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -828,7 +825,6 @@ lgb.Dataset.construct <- function(dataset) {
#' be directly used with an \code{lgb.Dataset} object.
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -863,7 +859,6 @@ dim.lgb.Dataset <- function(x, ...) {
#' Since row names are irrelevant, it is recommended to use \code{colnames} directly.
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -936,7 +931,6 @@ dimnames.lgb.Dataset <- function(x) {
#' @return constructed sub dataset
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -983,7 +977,6 @@ slice.lgb.Dataset <- function(dataset, idxset, ...) {
#' }
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -1037,7 +1030,6 @@ getinfo.lgb.Dataset <- function(dataset, name, ...) {
#' }
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -1078,7 +1070,6 @@ setinfo.lgb.Dataset <- function(dataset, name, info, ...) {
#' @return passed dataset
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -1109,7 +1100,6 @@ lgb.Dataset.set.categorical <- function(dataset, categorical_feature) {
#' @return passed dataset
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package ="lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -1141,7 +1131,6 @@ lgb.Dataset.set.reference <- function(dataset, reference) {
#' @return passed dataset
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
4 changes: 1 addition & 3 deletions R-package/R/lgb.cv.R
@@ -56,19 +56,17 @@ CVBooster <- R6::R6Class(
#' @return a trained model \code{lgb.CVBooster}.
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
#' params <- list(objective = "regression", metric = "l2")
#' model <- lgb.cv(
#' params = params
#' , data = dtrain
#' , nrounds = 10L
#' , nrounds = 5L
#' , nfold = 3L
#' , min_data = 1L
#' , learning_rate = 1.0
#' , early_stopping_rounds = 5L
#' )
#' @importFrom data.table data.table setorderv
#' @export
10 changes: 6 additions & 4 deletions R-package/R/lgb.importance.R
@@ -13,20 +13,22 @@
#' }
#'
#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
#'
#' params <- list(
#' objective = "binary"
#' , learning_rate = 0.01
#' , num_leaves = 63L
#' , learning_rate = 0.1
#' , max_depth = -1L
#' , min_data_in_leaf = 1L
#' , min_sum_hessian_in_leaf = 1.0
#' )
#' model <- lgb.train(params, dtrain, 10L)
#' model <- lgb.train(
#' params = params
#' , data = dtrain
#' , nrounds = 5L
#' )
#'
#' tree_imp1 <- lgb.importance(model, percentage = TRUE)
#' tree_imp2 <- lgb.importance(model, percentage = FALSE)
10 changes: 6 additions & 4 deletions R-package/R/lgb.interprete.R
@@ -16,7 +16,6 @@
#' Contribution columns to each class.
#'
#' @examples
#' Sigmoid <- function(x) 1.0 / (1.0 + exp(-x))
#' Logit <- function(x) log(x / (1.0 - x))
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
@@ -27,13 +26,16 @@
#'
#' params <- list(
#' objective = "binary"
#' , learning_rate = 0.01
#' , num_leaves = 63L
#' , learning_rate = 0.1
#' , max_depth = -1L
#' , min_data_in_leaf = 1L
#' , min_sum_hessian_in_leaf = 1.0
#' )
#' model <- lgb.train(params, dtrain, 10L)
#' model <- lgb.train(
#' params = params
#' , data = dtrain
#' , nrounds = 3L
#' )
#'
#' tree_interpretation <- lgb.interprete(model, test$data, 1L:5L)
#'
12 changes: 7 additions & 5 deletions R-package/R/lgb.plot.importance.R
@@ -24,17 +24,19 @@
#'
#' params <- list(
#' objective = "binary"
#' , learning_rate = 0.01
#' , num_leaves = 63L
#' , max_depth = -1L
#' , learning_rate = 0.1
#' , min_data_in_leaf = 1L
#' , min_sum_hessian_in_leaf = 1.0
#' )
#'
#' model <- lgb.train(params, dtrain, 10L)
#' model <- lgb.train(
#' params = params
#' , data = dtrain
#' , nrounds = 5L
#' )
#'
#' tree_imp <- lgb.importance(model, percentage = TRUE)
#' lgb.plot.importance(tree_imp, top_n = 10L, measure = "Gain")
#' lgb.plot.importance(tree_imp, top_n = 5L, measure = "Gain")
#' @importFrom graphics barplot par
#' @export
lgb.plot.importance <- function(tree_imp,
39 changes: 27 additions & 12 deletions R-package/R/lgb.plot.interpretation.R
@@ -15,28 +15,43 @@
#' The \code{lgb.plot.interpretation} function creates a \code{barplot}.
#'
#' @examples
#' library(lightgbm)
#' Sigmoid <- function(x) {1.0 / (1.0 + exp(-x))}
#' Logit <- function(x) {log(x / (1.0 - x))}
#' \donttest{
#' Logit <- function(x) {
#' log(x / (1.0 - x))
#' }
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
#' setinfo(dtrain, "init_score", rep(Logit(mean(train$label)), length(train$label)))
#' labels <- agaricus.train$label
#' dtrain <- lgb.Dataset(
#' agaricus.train$data
#' , label = labels
#' )
#' setinfo(dtrain, "init_score", rep(Logit(mean(labels)), length(labels)))
#'
#' data(agaricus.test, package = "lightgbm")
#' test <- agaricus.test
#'
#' params <- list(
#' objective = "binary"
#' , learning_rate = 0.01
#' , num_leaves = 63L
#' , learning_rate = 0.1
#' , max_depth = -1L
#' , min_data_in_leaf = 1L
#' , min_sum_hessian_in_leaf = 1.0
#' )
#' model <- lgb.train(params, dtrain, 10L)
#' model <- lgb.train(
#' params = params
#' , data = dtrain
#' , nrounds = 5L
#' )
#'
#' tree_interpretation <- lgb.interprete(model, test$data, 1L:5L)
#' lgb.plot.interpretation(tree_interpretation[[1L]], top_n = 10L)
#' tree_interpretation <- lgb.interprete(
#' model = model
#' , data = agaricus.test$data
#' , idxset = 1L:5L
#' )
#' lgb.plot.interpretation(
#' tree_interpretation_dt = tree_interpretation[[1L]]
#' , top_n = 5L
#' )
#' }
#' @importFrom data.table setnames
#' @importFrom graphics barplot par
#' @export
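
One change above goes beyond trimming run time: the reworked lgb.plot.interpretation example computes init_score as the logit (log-odds) of the observed positive rate and attaches it to the Dataset, so boosting starts from the base rate instead of zero. A standalone illustration of that idiom, with toy labels that are purely an assumption for the sketch:

```r
# Illustration of the init_score idiom used in the lgb.plot.interpretation
# example: seed boosting with the log-odds of the base rate rather than 0.
Logit <- function(x) {
  log(x / (1.0 - x))
}
labels <- c(0L, 1L, 1L, 0L, 1L)  # toy binary labels (assumption, not from the diff)
init_score <- rep(Logit(mean(labels)), length(labels))
print(init_score)
# In the roxygen example this vector is attached to the training Dataset with:
# setinfo(dtrain, "init_score", init_score)
```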
1 change: 0 additions & 1 deletion R-package/R/lgb.prepare.R
@@ -8,7 +8,6 @@
#' for input in \code{lgb.Dataset}.
#'
#' @examples
#' library(lightgbm)
#' data(iris)
#'
#' str(iris)
1 change: 0 additions & 1 deletion R-package/R/lgb.prepare2.R
@@ -11,7 +11,6 @@
#' for input in \code{lgb.Dataset}.
#'
#' @examples
#' library(lightgbm)
#' data(iris)
#'
#' str(iris)
1 change: 0 additions & 1 deletion R-package/R/lgb.prepare_rules.R
@@ -10,7 +10,6 @@
#' in \code{lgb.Dataset}.
#'
#' @examples
#' library(lightgbm)
#' data(iris)
#'
#' str(iris)
1 change: 0 additions & 1 deletion R-package/R/lgb.prepare_rules2.R
@@ -13,7 +13,6 @@
#' \code{lgb.Dataset}.
#'
#' @examples
#' library(lightgbm)
#' data(iris)
#'
#' str(iris)
