refactor: use tsk(), stopf(), and minor lints #293

Merged (4 commits, Aug 14, 2024)
.lintr (5 changes: 2 additions & 3 deletions)
@@ -5,6 +5,5 @@ linters: linters_with_defaults(
  object_name_linter = object_name_linter(c("snake_case", "CamelCase")), # only allow snake case and camel case object names
  cyclocomp_linter = NULL, # do not check function complexity
  commented_code_linter = NULL, # allow code in comments
- line_length_linter = line_length_linter(120)
- )
-
+ line_length_linter = line_length_linter(120L)
+ )
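For context, the .lintr above configures the package-wide linters, including the 120-character line limit. A minimal sketch of running them locally, assuming the lintr package is installed (not part of the PR):

```r
# Hedged sketch: with the .lintr above at the package root, lintr picks up
# the configured linters, including the 120-character line length limit.
# install.packages("lintr")  # if not already installed
lintr::lint_package()  # lints all R files in the package with these settings
```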
R/LearnerClassifXgboost.R (4 changes: 2 additions & 2 deletions)
@@ -220,7 +220,7 @@ LearnerClassifXgboost = R6Class("LearnerClassifXgboost",
  }

  if (self$predict_type == "prob" && pv$objective == "multi:softmax") {
- stop("objective = 'multi:softmax' does not work with predict_type = 'prob'")
+ stopf("objective = 'multi:softmax' does not work with predict_type = 'prob'")
  }

  switch(pv$objective,
@@ -311,7 +311,7 @@ LearnerClassifXgboost = R6Class("LearnerClassifXgboost",
  pars = self$param_set$get_values(tags = "train")
  pars_train = self$state$param_vals
  if (!is.null(pars_train$early_stopping_rounds)) {
- stop("The parameter `early_stopping_rounds` is set. Early stopping and hotstarting are incompatible.")
+ stopf("The parameter `early_stopping_rounds` is set. Early stopping and hotstarting are incompatible.")
  }

  # Calculate additional boosting iterations
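A note on the stop() to stopf() swaps above: stopf() comes from mlr3misc and supports sprintf()-style formatting (and, as far as I recall, drops the call from the error condition). A minimal sketch, with an illustrative objective value rather than the learner's actual parameter handling:

```r
library(mlr3misc)

objective = "multi:softmax"  # illustrative value, not taken from a real learner

# stopf() formats its message like sprintf(), so values can be interpolated
if (objective == "multi:softmax") {
  stopf("objective = '%s' does not work with predict_type = 'prob'", objective)
}
```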
R/LearnerRegrKM.R (6 changes: 2 additions & 4 deletions)
@@ -81,10 +81,8 @@ LearnerRegrKM = R6Class("LearnerRegrKM",
  data = as_numeric_matrix(task$data(cols = task$feature_names))
  truth = task$truth()

- if (!is.null(pv$optim.method)) {
- if (pv$optim.method == "gen" && !requireNamespace("rgenoud", quietly = TRUE)) {
- stop("The 'rgenoud' package is required for optimization method 'gen'.")
- }
+ if (!is.null(pv$optim.method) && pv$optim.method == "gen" && !requireNamespace("rgenoud", quietly = TRUE)) {
+ stopf("The 'rgenoud' package is required for optimization method 'gen'.")
  }

  ns = pv$nugget.stability
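The LearnerRegrKM change above folds the nested if into one condition; && short-circuits, so the equality check and the requireNamespace() lookup only run when the earlier conditions hold. A rough standalone sketch (pv here is a stand-in list, not the learner's actual param_set values):

```r
pv = list(optim.method = "gen")  # stand-in for the learner's parameter values

# && stops at the first FALSE, so pv$optim.method is only compared when it is
# non-NULL, and rgenoud is only looked up when the "gen" optimizer is requested.
if (!is.null(pv$optim.method) && pv$optim.method == "gen" &&
  !requireNamespace("rgenoud", quietly = TRUE)) {
  mlr3misc::stopf("The 'rgenoud' package is required for optimization method 'gen'.")
}
```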
R/LearnerRegrNnet.R (4 changes: 2 additions & 2 deletions)
@@ -58,9 +58,9 @@ LearnerRegrNnet = R6Class("LearnerRegrNnet",
  id = "regr.nnet",
  packages = c("mlr3learners", "nnet"),
  feature_types = c("numeric", "factor", "ordered", "integer"),
- predict_types = c("response"),
+ predict_types = "response",
  param_set = ps,
- properties = c("weights"),
+ properties = "weights",
  label = "Single Layer Neural Network",
  man = "mlr3learners::mlr_learners_regr.nnet"
  )
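The LearnerRegrNnet edit only removes redundant c() wrappers; a single string is already a length-one character vector, so the learner definition is unchanged. Quick check:

```r
# Both comparisons return TRUE: wrapping one string in c() is a no-op,
# so dropping it is purely cosmetic.
identical(c("response"), "response")
identical(c("weights"), "weights")
```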
R/LearnerRegrXgboost.R (2 changes: 1 addition & 1 deletion)
@@ -234,7 +234,7 @@ LearnerRegrXgboost = R6Class("LearnerRegrXgboost",
  pars = self$param_set$get_values(tags = "train")
  pars_train = self$state$param_vals
  if (!is.null(pars_train$early_stopping_rounds)) {
- stop("The parameter `early_stopping_rounds` is set. Early stopping and hotstarting are incompatible.")
+ stopf("The parameter `early_stopping_rounds` is set. Early stopping and hotstarting are incompatible.")
  }

  # Calculate additional boosting iterations
inst/paramtest/test_paramtest_classif.xgboost.R (12 changes: 6 additions & 6 deletions)
@@ -8,19 +8,19 @@ xp = rvest::html_elements(x, "p")
  x = c(rvest::html_text2(xli), rvest::html_text2(xp))

  add_params_xgboost = x %>%
- grep("default=", ., value = T) %>%
- strsplit(., split = " ") %>%
- mlr3misc::map_chr(., function(x) x[1]) %>%
- gsub(",", replacement = "", .) %>%
+ grep("default=", ., value = TRUE) %>%
+ strsplit(., split = " ", fixed = TRUE) %>%
+ mlr3misc::map_chr(1L) %>%
+ gsub(",", replacement = "", ., fixed = TRUE) %>%
  ## these are defined on the same line as colsample_bytree and cannot be scraped therefore
  append(values = c("colsample_bylevel", "colsample_bynode")) %>%
  # values which do not match regex
  append(values = c("interaction_constraints", "monotone_constraints", "base_score")) %>%
  # only defined in help page but not in signature or website
- append(values = c("lambda_bias"))
+ append(values = "lambda_bias")

  test_that("classif.xgboost", {
- learner = lrn("classif.xgboost", nrounds = 1)
+ learner = lrn("classif.xgboost", nrounds = 1L)
  fun = list(xgboost::xgb.train, xgboost::xgboost, add_params_xgboost)
  exclude = c(
  "x", # handled by mlr3
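In the scraping pipeline above, fixed = TRUE tells strsplit() and gsub() to treat the pattern as a literal string instead of a regex, and mlr3misc::map_chr(1L) pulls the first element out of each list entry, i.e. the parameter name. A small isolated sketch with made-up input lines (x below is illustrative, not scraped from the xgboost docs):

```r
library(magrittr)
library(mlr3misc)

# made-up documentation lines; only the leading parameter name is wanted
x = c("eta default=0.3, range: [0,1]", "lambda, default=1, L2 regularization")

x %>%
  grep("default=", ., value = TRUE) %>%        # keep lines documenting a default
  strsplit(., split = " ", fixed = TRUE) %>%   # literal split on spaces, no regex
  map_chr(1L) %>%                              # first token of each line = parameter name
  gsub(",", replacement = "", ., fixed = TRUE) # strip a trailing literal comma
#> [1] "eta"    "lambda"
```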
inst/paramtest/test_paramtest_regr.xgboost.R (12 changes: 6 additions & 6 deletions)
@@ -8,19 +8,19 @@ xp = rvest::html_elements(x, "p")
  x = c(rvest::html_text2(xli), rvest::html_text2(xp))

  add_params_xgboost = x %>%
- grep("default=", ., value = T) %>%
- strsplit(., split = " ") %>%
- mlr3misc::map_chr(., function(x) x[1]) %>%
- gsub(",", replacement = "", .) %>%
+ grep("default=", ., value = TRUE) %>%
+ strsplit(., split = " ", fixed = TRUE) %>%
+ mlr3misc::map_chr(1L) %>%
+ gsub(",", replacement = "", ., fixed = TRUE) %>%
  ## these are defined on the same line as colsample_bytree and cannot be scraped therefore
  append(values = c("colsample_bylevel", "colsample_bynode")) %>%
  # values which do not match regex
  append(values = c("interaction_constraints", "monotone_constraints", "base_score")) %>%
  # only defined in help page but not in signature or website
- append(values = c("lambda_bias"))
+ append(values = "lambda_bias")

  test_that("regr.xgboost", {
- learner = lrn("regr.xgboost", nrounds = 1)
+ learner = lrn("regr.xgboost", nrounds = 1L)
  fun = list(xgboost::xgb.train, xgboost::xgboost, add_params_xgboost)
  exclude = c(
  "x", # handled by mlr3
tests/testthat/test_classif_glmnet.R (4 changes: 2 additions & 2 deletions)
@@ -2,7 +2,7 @@ skip_on_os("solaris") # glmnet not working properly on solaris
  skip_if_not_installed("glmnet")

  test_that("autotest", {
- learner = mlr3::lrn("classif.glmnet", lambda = .1)
+ learner = mlr3::lrn("classif.glmnet", lambda = 0.1)
  expect_learner(learner)

  skip_on_os("solaris")
@@ -12,7 +12,7 @@ test_that("autotest", {

  test_that("prob column reordering (#155)", {
  task = tsk("sonar")
- learner = mlr3::lrn("classif.glmnet", predict_type = "prob", lambda = .1)
+ learner = mlr3::lrn("classif.glmnet", predict_type = "prob", lambda = 0.1)

  task$positive = "M"
  learner$train(task)
tests/testthat/test_classif_ranger.R (2 changes: 1 addition & 1 deletion)
@@ -56,7 +56,7 @@ test_that("mtry.ratio", {

  test_that("convert_ratio", {
  task = tsk("sonar")
- learner = lrn("classif.ranger", num.trees = 5, mtry.ratio = .5)
+ learner = lrn("classif.ranger", num.trees = 5, mtry.ratio = 0.5)
  expect_equal(learner$train(task)$model$mtry, 30)

  learner$param_set$values$mtry.ratio = 0
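A side note on the convert_ratio test above: sonar has 60 features, so mtry.ratio = 0.5 is converted to mtry = 30 before ranger is called, which is exactly what the expectation checks. Reproduced by hand, assuming mlr3, mlr3learners, and ranger are installed:

```r
library(mlr3)
library(mlr3learners)

task = tsk("sonar")
length(task$feature_names)  # 60 predictors

# mtry.ratio = 0.5 with 60 features gives mtry = 30
learner = lrn("classif.ranger", num.trees = 5, mtry.ratio = 0.5)
learner$train(task)
learner$model$mtry  # 30, matching expect_equal(..., 30) in the test above
```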
tests/testthat/test_classif_xgboost.R (4 changes: 2 additions & 2 deletions)
@@ -15,14 +15,14 @@ test_that("xgboost with softmax", {
  })

  test_that("xgboost with multi:softprob", {
- task = mlr_tasks$get("sonar")
+ task = tsk("sonar")
  learner = mlr3::lrn("classif.xgboost", nrounds = 5L, objective = "multi:softprob")
  p = learner$train(task)$predict(task)
  expect_equal(unname(p$score()), 0)
  })

  test_that("xgboost with binary:logistic", {
- task = mlr_tasks$get("sonar")
+ task = tsk("sonar")
  learner = mlr3::lrn("classif.xgboost", nrounds = 5L)
  p = learner$train(task)$predict(task)
  expect_equal(unname(p$score()), 0)
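The test edits above replace mlr_tasks$get() with the tsk() sugar; both look the task up in the mlr_tasks dictionary, tsk() is simply the shorter mlr3 helper. A quick sketch:

```r
library(mlr3)

# Both calls retrieve the sonar example task from the mlr_tasks dictionary
task_sugar = tsk("sonar")
task_dict = mlr_tasks$get("sonar")
identical(task_sugar$id, task_dict$id)  # TRUE
```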
tests/testthat/test_regr_glmnet.R (2 changes: 1 addition & 1 deletion)
@@ -2,7 +2,7 @@ skip_on_os("solaris") # glmnet not working properly on solaris
  skip_if_not_installed("glmnet")

  test_that("autotest", {
- learner = mlr3::lrn("regr.glmnet", lambda = .1)
+ learner = mlr3::lrn("regr.glmnet", lambda = 0.1)
  expect_learner(learner)

  skip_on_os("solaris")