diff --git a/R-package/NAMESPACE b/R-package/NAMESPACE
index 92812c50dd73..b356ba927177 100644
--- a/R-package/NAMESPACE
+++ b/R-package/NAMESPACE
@@ -48,5 +48,6 @@ importFrom(graphics,par)
 importFrom(jsonlite,fromJSON)
 importFrom(methods,is)
 importFrom(stats,quantile)
+importFrom(utils,modifyList)
 importFrom(utils,read.delim)
 useDynLib(lib_lightgbm , .registration = TRUE)
diff --git a/R-package/R/lgb.Booster.R b/R-package/R/lgb.Booster.R
index d9e0186f97b1..fad82864fe18 100644
--- a/R-package/R/lgb.Booster.R
+++ b/R-package/R/lgb.Booster.R
@@ -1,4 +1,5 @@
 #' @importFrom R6 R6Class
+#' @importFrom utils modifyList
 Booster <- R6::R6Class(
   classname = "lgb.Booster",
   cloneable = FALSE,
@@ -38,7 +39,7 @@ Booster <- R6::R6Class(
           stop("lgb.Booster: Can only use lgb.Dataset as training data")
         }
         train_set_handle <- train_set$.__enclos_env__$private$get_handle()
-        params <- modifyList(params, train_set$get_params())
+        params <- utils::modifyList(params, train_set$get_params())
         params_str <- lgb.params2str(params = params)
         # Store booster handle
         handle <- .Call(
@@ -176,11 +177,21 @@ Booster <- R6::R6Class(
     reset_parameter = function(params, ...) {
 
+      additional_params <- list(...)
+      if (length(additional_params) > 0L) {
+        warning(paste0(
+          "Booster$reset_parameter(): Found the following passed through '...': "
+          , paste(names(additional_params), collapse = ", ")
+          , ". These will be used, but in future releases of lightgbm, this warning will become an error. "
+          , "Add these to 'params' instead."
+        ))
+      }
+
       if (methods::is(self$params, "list")) {
-        params <- modifyList(self$params, params)
+        params <- utils::modifyList(self$params, params)
       }
-      params <- modifyList(params, list(...))
+      params <- utils::modifyList(params, additional_params)
       params_str <- lgb.params2str(params = params)
 
       .Call(
@@ -469,8 +480,19 @@ Booster <- R6::R6Class(
                        predcontrib = FALSE,
                        header = FALSE,
                        reshape = FALSE,
+                       params = list(),
                        ...) {
 
+      additional_params <- list(...)
+      if (length(additional_params) > 0L) {
+        warning(paste0(
+          "Booster$predict(): Found the following passed through '...': "
+          , paste(names(additional_params), collapse = ", ")
+          , ". These will be used, but in future releases of lightgbm, this warning will become an error. "
+          , "Add these to 'params' instead. See ?predict.lgb.Booster for documentation on how to call this function."
+        ))
+      }
+
       if (is.null(num_iteration)) {
         num_iteration <- self$best_iter
       }
@@ -480,7 +502,7 @@ Booster <- R6::R6Class(
       }
 
       # Predict on new data
-      params <- list(...)
+      params <- utils::modifyList(params, additional_params)
       predictor <- Predictor$new(
         modelfile = private$handle
         , params = params
@@ -699,8 +721,11 @@ Booster <- R6::R6Class(
 #' @param header only used for prediction for text file. True if text file has header
 #' @param reshape whether to reshape the vector of predictions to a matrix form when there are several
 #'        prediction outputs per case.
-#' @param ... Additional named arguments passed to the \code{predict()} method of
-#'            the \code{lgb.Booster} object passed to \code{object}.
+#' @param params a list of additional named parameters. See
+#'               \href{https://lightgbm.readthedocs.io/en/latest/Parameters.html#predict-parameters}{
+#'               the "Predict Parameters" section of the documentation} for a list of parameters and
+#'               valid values.
+#' @param ... Additional prediction parameters. NOTE: deprecated as of v3.3.0. Use \code{params} instead.
 #' @return For regression or binary classification, it returns a vector of length \code{nrows(data)}.
 #'         For multiclass classification, either a \code{num_class * nrows(data)} vector or
 #'         a \code{(nrows(data), num_class)} dimension matrix is returned, depending on
@@ -728,7 +753,17 @@ Booster <- R6::R6Class(
 #'   , learning_rate = 1.0
 #' )
 #' preds <- predict(model, test$data)
+#'
+#' # pass other prediction parameters
+#' predict(
+#'     model,
+#'     test$data,
+#'     params = list(
+#'         predict_disable_shape_check = TRUE
+#'     )
+#' )
 #' }
+#' @importFrom utils modifyList
 #' @export
 predict.lgb.Booster <- function(object,
                                 data,
@@ -739,12 +774,23 @@ predict.lgb.Booster <- function(object,
                                 predcontrib = FALSE,
                                 header = FALSE,
                                 reshape = FALSE,
+                                params = list(),
                                 ...) {
 
   if (!lgb.is.Booster(x = object)) {
     stop("predict.lgb.Booster: object should be an ", sQuote("lgb.Booster"))
   }
 
+  additional_params <- list(...)
+  if (length(additional_params) > 0L) {
+    warning(paste0(
+      "predict.lgb.Booster: Found the following passed through '...': "
+      , paste(names(additional_params), collapse = ", ")
+      , ". These will be used, but in future releases of lightgbm, this warning will become an error. "
+      , "Add these to 'params' instead. See ?predict.lgb.Booster for documentation on how to call this function."
+    ))
+  }
+
   return(
     object$predict(
       data = data
@@ -755,7 +801,7 @@ predict.lgb.Booster <- function(object,
       , predcontrib = predcontrib
       , header = header
       , reshape = reshape
-      , ...
+      , params = utils::modifyList(params, additional_params)
     )
   )
 }
diff --git a/R-package/R/lgb.Dataset.R b/R-package/R/lgb.Dataset.R
index def2d2ebecf1..7d31d78d82ca 100644
--- a/R-package/R/lgb.Dataset.R
+++ b/R-package/R/lgb.Dataset.R
@@ -8,6 +8,7 @@
 #' @importFrom methods is
 #' @importFrom R6 R6Class
+#' @importFrom utils modifyList
 Dataset <- R6::R6Class(
 
   classname = "lgb.Dataset",
@@ -535,7 +536,7 @@ Dataset <- R6::R6Class(
         return(invisible(self))
       }
       if (lgb.is.null.handle(x = private$handle)) {
-        private$params <- modifyList(private$params, params)
+        private$params <- utils::modifyList(private$params, params)
       } else {
         tryCatch({
           .Call(
@@ -552,7 +553,7 @@ Dataset <- R6::R6Class(
 
           # If updating failed but raw data is available, modify the params
           # on the R side and re-set ("deconstruct") the Dataset
-          private$params <- modifyList(private$params, params)
+          private$params <- utils::modifyList(private$params, params)
           self$finalize()
         })
       }
diff --git a/R-package/R/lgb.cv.R b/R-package/R/lgb.cv.R
index dc0e7d82b88b..c011935080bc 100644
--- a/R-package/R/lgb.cv.R
+++ b/R-package/R/lgb.cv.R
@@ -12,7 +12,9 @@ CVBooster <- R6::R6Class(
       return(invisible(NULL))
     },
     reset_parameter = function(new_params) {
-      for (x in boosters) { x$reset_parameter(new_params) }
+      for (x in boosters) {
+        x$reset_parameter(params = new_params)
+      }
       return(invisible(self))
     }
   )
diff --git a/R-package/man/predict.lgb.Booster.Rd b/R-package/man/predict.lgb.Booster.Rd
index c1c4cfb0cc77..e0d66663fdc7 100644
--- a/R-package/man/predict.lgb.Booster.Rd
+++ b/R-package/man/predict.lgb.Booster.Rd
@@ -14,6 +14,7 @@
   predcontrib = FALSE,
   header = FALSE,
   reshape = FALSE,
+  params = list(),
   ...
 )
 }
@@ -45,8 +46,12 @@ for logistic regression would result in predictions for log-odds instead of prob
 \item{reshape}{whether to reshape the vector of predictions to a matrix form when there are several
 prediction outputs per case.}
 
-\item{...}{Additional named arguments passed to the \code{predict()} method of
-the \code{lgb.Booster} object passed to \code{object}.}
+\item{params}{a list of additional named parameters. See
+\href{https://lightgbm.readthedocs.io/en/latest/Parameters.html#predict-parameters}{
+the "Predict Parameters" section of the documentation} for a list of parameters and
+valid values.}
+
+\item{...}{Additional prediction parameters. NOTE: deprecated as of v3.3.0. Use \code{params} instead.}
 }
 \value{
 For regression or binary classification, it returns a vector of length \code{nrows(data)}.
@@ -79,5 +84,14 @@ model <- lgb.train(
   , learning_rate = 1.0
 )
 preds <- predict(model, test$data)
+
+# pass other prediction parameters
+predict(
+    model,
+    test$data,
+    params = list(
+        predict_disable_shape_check = TRUE
+    )
+)
 }
 }
diff --git a/R-package/tests/testthat/test_basic.R b/R-package/tests/testthat/test_basic.R
index e1e8b7114f8c..1fc9d97b3c97 100644
--- a/R-package/tests/testthat/test_basic.R
+++ b/R-package/tests/testthat/test_basic.R
@@ -381,7 +381,7 @@ test_that("lgb.cv() fit on linearly-relatead data improves when using linear lea
   cv_bst_linear <- lgb.cv(
     data = dtrain
     , nrounds = 10L
-    , params = modifyList(params, list(linear_tree = TRUE))
+    , params = utils::modifyList(params, list(linear_tree = TRUE))
     , nfold = 5L
   )
   expect_is(cv_bst_linear, "lgb.CVBooster")
@@ -1767,7 +1767,7 @@ test_that("lgb.train() fit on linearly-relatead data improves when using linear
   bst_linear <- lgb.train(
     data = dtrain
     , nrounds = 10L
-    , params = modifyList(params, list(linear_tree = TRUE))
+    , params = utils::modifyList(params, list(linear_tree = TRUE))
     , valids = list("train" = dtrain)
   )
   expect_true(lgb.is.Booster(bst_linear))
@@ -1798,7 +1798,7 @@ test_that("lgb.train() w/ linear learner fails already-constructed dataset with
     bst_linear <- lgb.train(
       data = dtrain
       , nrounds = 10L
-      , params = modifyList(params, list(linear_tree = TRUE))
+      , params = utils::modifyList(params, list(linear_tree = TRUE))
     )
   }, regexp = "Cannot change linear_tree after constructed Dataset handle")
})
@@ -1839,7 +1839,7 @@ test_that("lgb.train() works with linear learners even if Dataset has missing va
   bst_linear <- lgb.train(
     data = dtrain
     , nrounds = 10L
-    , params = modifyList(params, list(linear_tree = TRUE))
+    , params = utils::modifyList(params, list(linear_tree = TRUE))
     , valids = list("train" = dtrain)
   )
   expect_true(lgb.is.Booster(bst_linear))
@@ -1887,7 +1887,7 @@ test_that("lgb.train() works with linear learners, bagging, and a Dataset that h
   bst_linear <- lgb.train(
     data = dtrain
     , nrounds = 10L
-    , params = modifyList(params, list(linear_tree = TRUE))
+    , params = utils::modifyList(params, list(linear_tree = TRUE))
     , valids = list("train" = dtrain)
   )
   expect_true(lgb.is.Booster(bst_linear))
@@ -1925,7 +1925,7 @@ test_that("lgb.train() works with linear learners and data where a feature has o
   bst_linear <- lgb.train(
     data = dtrain
     , nrounds = 10L
-    , params = modifyList(params, list(linear_tree = TRUE))
+    , params = utils::modifyList(params, list(linear_tree = TRUE))
   )
   expect_true(lgb.is.Booster(bst_linear))
})
@@ -1964,7 +1964,7 @@ test_that("lgb.train() works with linear learners when Dataset has categorical f
   bst_linear <- lgb.train(
     data = dtrain
     , nrounds = 10L
-    , params = modifyList(params, list(linear_tree = TRUE))
+    , params = utils::modifyList(params, list(linear_tree = TRUE))
     , valids = list("train" = dtrain)
  )
  expect_true(lgb.is.Booster(bst_linear))
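
A minimal usage sketch of the behavior this patch introduces (not part of the patch itself): prediction parameters are now passed through the new 'params' list argument, while anything supplied through '...' still works but triggers the deprecation warning added above. The example assumes the agaricus datasets bundled with lightgbm, an arbitrary binary objective, and a small number of boosting rounds; it mirrors the example added to the roxygen docs.

    library(lightgbm)
    data(agaricus.train, package = "lightgbm")
    data(agaricus.test, package = "lightgbm")

    # train a small binary classification model on the bundled agaricus data
    dtrain <- lgb.Dataset(agaricus.train$data, label = agaricus.train$label)
    model <- lgb.train(
      params = list(objective = "binary")
      , data = dtrain
      , nrounds = 5L
    )

    # preferred as of v3.3.0: pass prediction parameters through 'params'
    preds <- predict(
      model
      , agaricus.test$data
      , params = list(predict_disable_shape_check = TRUE)
    )

    # deprecated: passing prediction parameters through '...' still works,
    # but now raises the warning added in predict.lgb.Booster()
    preds_dots <- predict(
      model
      , agaricus.test$data
      , predict_disable_shape_check = TRUE
    )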