Skip to content

Commit

Permalink
[R-package] reduce verbosity in some unit tests (#4879)
Browse files Browse the repository at this point in the history
* [R-package] reduce verbosity in some unit tests

* simplify

* Update R-package/tests/testthat/test_lgb.plot.interpretation.R
  • Loading branch information
jameslamb authored Dec 18, 2021
1 parent 4523020 commit bd2e949
Show file tree
Hide file tree
Showing 7 changed files with 64 additions and 6 deletions.
25 changes: 25 additions & 0 deletions R-package/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@
- [Installing from a Pre-compiled lib_lightgbm](#lib_lightgbm)
* [Examples](#examples)
* [Testing](#testing)
- [Running the Tests](#running-the-tests)
- [Code Coverage](#code-coverage)
* [Preparing a CRAN Package](#preparing-a-cran-package)
* [External Repositories](#external-unofficial-repositories)
* [Known Issues](#known-issues)
Expand Down Expand Up @@ -234,6 +236,29 @@ Testing

The R package's unit tests are run automatically on every commit, via integrations like [GitHub Actions](https://github.com/microsoft/LightGBM/actions). Adding new tests in `R-package/tests/testthat` is a valuable way to improve the reliability of the R package.

### Running the Tests

While developing the R package, use the commands below to run the unit tests.

```shell
sh build-cran-package.sh \
--no-build-vignettes

R CMD INSTALL --with-keep.source lightgbm*.tar.gz
cd R-package/tests
Rscript testthat.R
```

To run the tests with more verbose logs, set the environment variable `LIGHTGBM_TEST_VERBOSITY` to a valid value for the parameter [`verbosity`](https://lightgbm.readthedocs.io/en/latest/Parameters.html#verbosity).

```shell
export LIGHTGBM_TEST_VERBOSITY=1
cd R-package/tests
Rscript testthat.R
```

### Code Coverage

When adding tests, you may want to use test coverage to identify untested areas and to check if the tests you've added are covering all branches of the intended code.

The example below shows how to generate code coverage for the R package on a macOS or Linux setup. To adjust for your environment, refer to [the customization step described above](#custom-installation-linux-mac).
Expand Down
9 changes: 7 additions & 2 deletions R-package/tests/testthat/test_Predictor.R
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# Verbosity level passed to LightGBM calls in these tests. Defaults to -1
# (silent); override by setting the LIGHTGBM_TEST_VERBOSITY environment variable.
VERBOSITY <- as.integer(Sys.getenv("LIGHTGBM_TEST_VERBOSITY", unset = "-1"))

context("Predictor")

test_that("Predictor$finalize() should not fail", {
Expand All @@ -9,7 +13,7 @@ test_that("Predictor$finalize() should not fail", {
, params = list(
objective = "regression"
)
, verbose = -1L
, verbose = VERBOSITY
, nrounds = 3L
)
model_file <- tempfile(fileext = ".model")
Expand Down Expand Up @@ -37,7 +41,7 @@ test_that("predictions do not fail for integer input", {
, params = list(
objective = "regression"
)
, verbose = -1L
, verbose = VERBOSITY
, nrounds = 3L
)
X_double <- X[c(1L, 51L, 101L), , drop = FALSE]
Expand Down Expand Up @@ -70,6 +74,7 @@ test_that("start_iteration works correctly", {
num_leaves = 4L
, learning_rate = 0.6
, objective = "binary"
, verbosity = VERBOSITY
)
, nrounds = 50L
, valids = list("test" = dtest)
Expand Down
6 changes: 6 additions & 0 deletions R-package/tests/testthat/test_lgb.interprete.R
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# Verbosity level passed to LightGBM calls in these tests. Defaults to -1
# (silent); override by setting the LIGHTGBM_TEST_VERBOSITY environment variable.
VERBOSITY <- as.integer(Sys.getenv("LIGHTGBM_TEST_VERBOSITY", unset = "-1"))

context("lgb.interpete")

.sigmoid <- function(x) {
Expand Down Expand Up @@ -28,6 +32,7 @@ test_that("lgb.intereprete works as expected for binary classification", {
, max_depth = -1L
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
, verbose = VERBOSITY
)
model <- lgb.train(
params = params
Expand Down Expand Up @@ -79,6 +84,7 @@ test_that("lgb.intereprete works as expected for multiclass classification", {
, num_class = 3L
, learning_rate = 0.00001
, min_data = 1L
, verbose = VERBOSITY
)
model <- lgb.train(
params = params
Expand Down
5 changes: 5 additions & 0 deletions R-package/tests/testthat/test_lgb.plot.importance.R
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# Verbosity level passed to LightGBM calls in these tests. Defaults to -1
# (silent); override by setting the LIGHTGBM_TEST_VERBOSITY environment variable.
VERBOSITY <- as.integer(Sys.getenv("LIGHTGBM_TEST_VERBOSITY", unset = "-1"))

context("lgb.plot.importance()")

test_that("lgb.plot.importance() should run without error for well-formed inputs", {
Expand All @@ -11,6 +15,7 @@ test_that("lgb.plot.importance() should run without error for well-formed inputs
, max_depth = -1L
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
, verbosity = VERBOSITY
)
model <- lgb.train(params, dtrain, 3L)
tree_imp <- lgb.importance(model, percentage = TRUE)
Expand Down
6 changes: 6 additions & 0 deletions R-package/tests/testthat/test_lgb.plot.interpretation.R
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# Verbosity level passed to LightGBM calls in these tests. Defaults to -1
# (silent); override by setting the LIGHTGBM_TEST_VERBOSITY environment variable.
VERBOSITY <- as.integer(Sys.getenv("LIGHTGBM_TEST_VERBOSITY", unset = "-1"))

context("lgb.plot.interpretation")

.sigmoid <- function(x) {
Expand Down Expand Up @@ -28,6 +32,7 @@ test_that("lgb.plot.interepretation works as expected for binary classification"
, max_depth = -1L
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
, verbosity = VERBOSITY
)
model <- lgb.train(
params = params
Expand Down Expand Up @@ -82,6 +87,7 @@ test_that("lgb.plot.interepretation works as expected for multiclass classificat
params = params
, data = dtrain
, nrounds = 3L
, verbose = VERBOSITY
)
num_trees <- 5L
tree_interpretation <- lgb.interprete(
Expand Down
7 changes: 7 additions & 0 deletions R-package/tests/testthat/test_lgb.unloader.R
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# Verbosity level passed to LightGBM calls in these tests. Defaults to -1
# (silent); override by setting the LIGHTGBM_TEST_VERBOSITY environment variable.
VERBOSITY <- as.integer(Sys.getenv("LIGHTGBM_TEST_VERBOSITY", unset = "-1"))

context("lgb.unloader")

test_that("lgb.unloader works as expected", {
Expand All @@ -10,6 +14,7 @@ test_that("lgb.unloader works as expected", {
, metric = "l2"
, min_data = 1L
, learning_rate = 1.0
, verbosity = VERBOSITY
)
, data = dtrain
, nrounds = 1L
Expand All @@ -30,6 +35,7 @@ test_that("lgb.unloader finds all boosters and removes them", {
, metric = "l2"
, min_data = 1L
, learning_rate = 1.0
, verbosity = VERBOSITY
)
, data = dtrain
, nrounds = 1L
Expand All @@ -40,6 +46,7 @@ test_that("lgb.unloader finds all boosters and removes them", {
, metric = "l2"
, min_data = 1L
, learning_rate = 1.0
, verbosity = VERBOSITY
)
, data = dtrain
, nrounds = 1L
Expand Down
12 changes: 8 additions & 4 deletions R-package/tests/testthat/test_weighted_loss.R
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# Verbosity level passed to LightGBM calls in these tests. Defaults to -1
# (silent); override by setting the LIGHTGBM_TEST_VERBOSITY environment variable.
VERBOSITY <- as.integer(Sys.getenv("LIGHTGBM_TEST_VERBOSITY", unset = "-1"))

context("Case weights are respected")

test_that("Gamma regression reacts on 'weight'", {
Expand All @@ -15,7 +19,7 @@ test_that("Gamma regression reacts on 'weight'", {
params = params
, data = dtrain
, nrounds = 4L
, verbose = 0L
, verbose = VERBOSITY
)
pred_unweighted <- predict(bst, X_pred)

Expand All @@ -29,7 +33,7 @@ test_that("Gamma regression reacts on 'weight'", {
params = params
, data = dtrain
, nrounds = 4L
, verbose = 0L
, verbose = VERBOSITY
)
pred_weighted_1 <- predict(bst, X_pred)

Expand All @@ -43,7 +47,7 @@ test_that("Gamma regression reacts on 'weight'", {
params = params
, data = dtrain
, nrounds = 4L
, verbose = 0L
, verbose = VERBOSITY
)
pred_weighted_2 <- predict(bst, X_pred)

Expand All @@ -57,7 +61,7 @@ test_that("Gamma regression reacts on 'weight'", {
params = params
, data = dtrain
, nrounds = 4L
, verbose = 0L
, verbose = VERBOSITY
)
pred_weighted <- predict(bst, X_pred)

Expand Down

0 comments on commit bd2e949

Please sign in to comment.