[R] Error out on unrecognized arguments and ... parameters (#11074)
david-cortes authored Dec 9, 2024
1 parent a361896 commit 94c6714
Showing 61 changed files with 1,159 additions and 737 deletions.
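Per the commit title, the R training functions now error out on arguments they do not recognize, so booster parameters must go through `params` (e.g. built with `xgb.params()` or concatenated onto an existing parameter list) rather than being passed loose, as the documentation updates below reflect. A minimal sketch of the updated calling style, assuming the `agaricus.train` demo data shipped with the package:

library(xgboost)

# Demo data shipped with the package
data(agaricus.train, package = "xgboost")
dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label, nthread = 1)

# Booster parameters go through `params` (here built with xgb.params());
# passing them as loose arguments to xgb.train() is rejected after this change.
params <- xgb.params(
  objective = "binary:logistic",
  eta = 0.5,
  nthread = 1
)

bst <- xgb.train(
  params = params,
  data = dtrain,
  evals = list(tr = dtrain),
  nrounds = 10
)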
43 changes: 21 additions & 22 deletions R-package/R/callbacks.R
@@ -61,7 +61,7 @@
#' will be the same as parameter `begin_iteration`, then next one will add +1, and so on).
#'
#' - iter_feval Evaluation metrics for `evals` that were supplied, either
-#' determined by the objective, or by parameter `feval`.
+#' determined by the objective, or by parameter `custom_metric`.
#'
#' For [xgb.train()], this will be a named vector with one entry per element in
#' `evals`, where the names are determined as 'evals name' + '-' + 'metric name' - for
@@ -206,8 +206,7 @@
#' data = dm,
#' params = xgb.params(objective = "reg:squarederror", nthread = 1),
#' nrounds = 5,
-#' callbacks = list(ssq_callback),
-#' keep_extra_attributes = TRUE
+#' callbacks = list(ssq_callback)
#' )
#'
#' # Result from 'f_after_iter' will be available as an attribute
@@ -451,7 +450,7 @@ xgb.cb.print.evaluation <- function(period = 1, showsd = TRUE) {
#' Callback for logging the evaluation history
#'
#' @details This callback creates a table with per-iteration evaluation metrics (see parameters
-#' `evals` and `feval` in [xgb.train()]).
+#' `evals` and `custom_metric` in [xgb.train()]).
#'
#' Note: in the column names of the final data.table, the dash '-' character is replaced with
#' the underscore '_' in order to make the column names more like regular R identifiers.
@@ -957,7 +956,7 @@ xgb.cb.cv.predict <- function(save_models = FALSE, outputmargin = FALSE) {
#' label = 1 * (iris$Species == "versicolor"),
#' nthread = nthread
#' )
-#' param <- list(
+#' param <- xgb.params(
#' booster = "gblinear",
#' objective = "reg:logistic",
#' eval_metric = "auc",
@@ -971,11 +970,10 @@ xgb.cb.cv.predict <- function(save_models = FALSE, outputmargin = FALSE) {
#' # rate does not break the convergence, but allows us to illustrate the typical pattern of
#' # "stochastic explosion" behaviour of this lock-free algorithm at early boosting iterations.
#' bst <- xgb.train(
-#' param,
+#' c(param, list(eta = 1.)),
#' dtrain,
-#' list(tr = dtrain),
+#' evals = list(tr = dtrain),
#' nrounds = 200,
-#' eta = 1.,
#' callbacks = list(xgb.cb.gblinear.history())
#' )
#'
@@ -986,14 +984,18 @@ xgb.cb.cv.predict <- function(save_models = FALSE, outputmargin = FALSE) {
#' # With the deterministic coordinate descent updater, it is safer to use higher learning rates.
#' # Will try the classical componentwise boosting which selects a single best feature per round:
#' bst <- xgb.train(
-#' param,
+#' c(
+#' param,
+#' xgb.params(
+#' eta = 0.8,
+#' updater = "coord_descent",
+#' feature_selector = "thrifty",
+#' top_k = 1
+#' )
+#' ),
#' dtrain,
-#' list(tr = dtrain),
+#' evals = list(tr = dtrain),
#' nrounds = 200,
-#' eta = 0.8,
-#' updater = "coord_descent",
-#' feature_selector = "thrifty",
-#' top_k = 1,
#' callbacks = list(xgb.cb.gblinear.history())
#' )
#' matplot(xgb.gblinear.history(bst), type = "l")
@@ -1003,11 +1005,10 @@ xgb.cb.cv.predict <- function(save_models = FALSE, outputmargin = FALSE) {
#'
#' # For xgb.cv:
#' bst <- xgb.cv(
-#' param,
+#' c(param, list(eta = 0.8)),
#' dtrain,
#' nfold = 5,
#' nrounds = 100,
-#' eta = 0.8,
#' callbacks = list(xgb.cb.gblinear.history())
#' )
#' # coefficients in the CV fold #3
@@ -1017,7 +1018,7 @@ xgb.cb.cv.predict <- function(save_models = FALSE, outputmargin = FALSE) {
#' #### Multiclass classification:
#' dtrain <- xgb.DMatrix(scale(x), label = as.numeric(iris$Species) - 1, nthread = nthread)
#'
-#' param <- list(
+#' param <- xgb.params(
#' booster = "gblinear",
#' objective = "multi:softprob",
#' num_class = 3,
@@ -1029,11 +1030,10 @@ xgb.cb.cv.predict <- function(save_models = FALSE, outputmargin = FALSE) {
#' # For the default linear updater 'shotgun' it sometimes is helpful
#' # to use smaller eta to reduce instability
#' bst <- xgb.train(
-#' param,
+#' c(param, list(eta = 0.5)),
#' dtrain,
-#' list(tr = dtrain),
+#' evals = list(tr = dtrain),
#' nrounds = 50,
-#' eta = 0.5,
#' callbacks = list(xgb.cb.gblinear.history())
#' )
#'
@@ -1044,11 +1044,10 @@ xgb.cb.cv.predict <- function(save_models = FALSE, outputmargin = FALSE) {
#'
#' # CV:
#' bst <- xgb.cv(
-#' param,
+#' c(param, list(eta = 0.5)),
#' dtrain,
#' nfold = 5,
#' nrounds = 70,
-#' eta = 0.5,
#' callbacks = list(xgb.cb.gblinear.history(FALSE))
#' )
#' # 1st fold of 1st class
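The documentation changes above also rename the custom evaluation parameter of `xgb.train()` from `feval` to `custom_metric`. A minimal, self-contained sketch of supplying one, assuming the conventional R signature of a function that takes predictions and a DMatrix and returns a metric name/value list (the `mae` helper is purely illustrative):

library(xgboost)
data(agaricus.train, package = "xgboost")
dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label, nthread = 1)

# Illustrative custom metric: mean absolute error, returned as a
# name/value pair as the R interface expects from custom evaluation functions.
mae <- function(preds, dtrain) {
  labels <- getinfo(dtrain, "label")
  list(metric = "mae", value = mean(abs(preds - labels)))
}

bst <- xgb.train(
  params = xgb.params(objective = "reg:squarederror", nthread = 1),
  data = dtrain,
  evals = list(tr = dtrain),
  nrounds = 5,
  custom_metric = mae
)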