Clean up settings.debug
Summary: In the past, `settings.debug` used to suppress all `BotorchWarning`s. Some time ago, we reduced its scope and started surfacing these warnings again. Since then, `settings.debug` has been used in only a couple of places. I was curious how much usage it had left and decided to remove it entirely while at it.

Differential Revision: D65498337
saitcakmak authored and facebook-github-bot committed Nov 5, 2024
1 parent 3ca48d0 commit 3692626
Showing 22 changed files with 165 additions and 271 deletions.
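Every change below follows the same mechanical pattern: drop the `settings.debug` gate (or context manager) and let the warning logic run unconditionally. A minimal sketch of the before/after (illustrative only; `check_cost` is a hypothetical helper, not code from this diff):

import warnings

import torch


def check_cost(cost: torch.Tensor) -> None:
    # Old pattern, removed by this commit: warn only in debug mode.
    #     if settings.debug.on():
    #         if torch.any(cost < -1e-7):
    #             warnings.warn("Encountered negative cost values")
    # New pattern: warn unconditionally.
    if torch.any(cost < -1e-7):
        warnings.warn("Encountered negative cost values")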
18 changes: 8 additions & 10 deletions botorch/acquisition/cost_aware.py
@@ -16,7 +16,6 @@
 from collections.abc import Callable
 
 import torch
-from botorch import settings
 from botorch.acquisition.objective import (
     GenericMCObjective,
     IdentityMCObjective,
@@ -26,6 +25,7 @@
 from botorch.models.deterministic import DeterministicModel
 from botorch.models.gpytorch import GPyTorchModel
 from botorch.sampling.base import MCSampler
+from pyre_extensions import none_throws
 from torch import Tensor
 from torch.nn import Module
 

@@ -199,18 +199,16 @@ def forward(
             cost = cost_posterior.mean  # batch_shape x q x m'
         else:
             # This will be of shape num_fantasies x batch_shape x q x m'
-            cost = sampler(cost_posterior)
-            # TODO: Make sure this doesn't change base samples in-place
+            cost = none_throws(sampler)(cost_posterior)
         cost = self.cost_objective(cost)
 
         # Ensure non-negativity of the cost
-        if settings.debug.on():
-            if torch.any(cost < -1e-7):
-                warnings.warn(
-                    "Encountered negative cost values in InverseCostWeightedUtility",
-                    CostAwareWarning,
-                    stacklevel=2,
-                )
+        if torch.any(cost < -1e-7):
+            warnings.warn(
+                "Encountered negative cost values in InverseCostWeightedUtility",
+                CostAwareWarning,
+                stacklevel=2,
+            )
         # clamp (away from zero) and sum cost across elements of the q-batch -
         # this will be of shape `num_fantasies x batch_shape` or `batch_shape`
         cost = cost.clamp_min(self._min_cost).sum(dim=-1)
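A note on the new `pyre_extensions` import above: `none_throws` converts an `Optional[T]` into a `T`, raising if the value is actually `None`. A rough sketch of its semantics (assumed behavior, not the library's actual source):

from typing import Optional, TypeVar

T = TypeVar("T")


def none_throws(value: Optional[T], message: str = "Unexpected None") -> T:
    # Raise if the value is None; otherwise return it with a narrowed type.
    if value is None:
        raise AssertionError(message)
    return value

This makes the previously implicit assumption that `sampler` is not `None` explicit, both at runtime and for the Pyre type checker.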
9 changes: 2 additions & 7 deletions botorch/fit.py
@@ -30,7 +30,6 @@
     get_parameters,
     sample_all_priors,
 )
-from botorch.settings import debug
 from botorch.utils.context_managers import (
     module_rollback_ctx,
     parameter_rollback_ctx,
@@ -200,7 +199,7 @@ def _fit_fallback(
 
     try:
         # Fit the model
-        with catch_warnings(record=True) as warning_list, debug(True):
+        with catch_warnings(record=True) as warning_list:
             simplefilter("always", category=OptimizationWarning)
             result = optimizer(mll, closure=closure, **optimizer_kwargs)
 
@@ -250,11 +249,7 @@ def _fit_fallback(
             mll.load_state_dict(best_state_dict)
         return mll.eval()
 
-    msg = "All attempts to fit the model have failed."
-    if debug.off():
-        msg = msg + " For more information, try enabling botorch.settings.debug mode."
-
-    raise ModelFittingError(msg)
+    raise ModelFittingError("All attempts to fit the model have failed.")
 
 
 @FitGPyTorchMLL.register(SumMarginalLogLikelihood, object, ModelListGP)
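The error message no longer suggests enabling debug mode because warnings raised during fitting are recorded regardless of any flag. A standalone illustration of the stdlib mechanism `_fit_fallback` relies on (generic Python, not BoTorch code):

import warnings
from warnings import catch_warnings, simplefilter

# catch_warnings(record=True) captures warnings on its own; no global
# debug flag is needed for them to be recorded and inspected afterwards.
with catch_warnings(record=True) as warning_list:
    simplefilter("always")
    warnings.warn("optimization failed to converge")

assert len(warning_list) == 1
print(warning_list[0].message)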
3 changes: 1 addition & 2 deletions botorch/optim/initializers.py
@@ -21,7 +21,6 @@
 from typing import Optional, Union
 
 import torch
-from botorch import settings
 from botorch.acquisition import analytic, monte_carlo, multi_objective
 from botorch.acquisition.acquisition import AcquisitionFunction
 from botorch.acquisition.fixed_feature import FixedFeatureAcquisitionFunction
@@ -341,7 +340,7 @@ def gen_batch_initial_conditions(
     q = 1 if q is None else q
     # the dimension the samples are drawn from
     effective_dim = bounds.shape[-1] * q
-    if effective_dim > SobolEngine.MAXDIM and settings.debug.on():
+    if effective_dim > SobolEngine.MAXDIM:
         warnings.warn(
             f"Sample dimension q*d={effective_dim} exceeding Sobol max dimension "
             f"({SobolEngine.MAXDIM}). Using iid samples instead.",
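With the flag gone, the fallback warning fires whenever the effective dimension `q * d` exceeds the Sobol limit. For reference, `torch.quasirandom.SobolEngine.MAXDIM` is 21201; a small example of the condition being checked:

from torch.quasirandom import SobolEngine

q, d = 2, 11000
effective_dim = d * q  # 22000 > SobolEngine.MAXDIM (21201)
# gen_batch_initial_conditions would warn here and fall back to iid samples.
assert effective_dim > SobolEngine.MAXDIM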
15 changes: 0 additions & 15 deletions botorch/settings.py
@@ -51,21 +51,6 @@ class propagate_grads(_Flag):
     _state: bool = False
 
 
-class debug(_Flag):
-    r"""Flag for printing verbose warnings.
-
-    To make sure a warning is only raised in debug mode:
-
-    >>> if debug.on():
-    >>>     warnings.warn(<some warning>)
-    """
-
-    _state: bool = False
-
-    @classmethod
-    def _set_state(cls, state: bool) -> None:
-        cls._state = state
-
-
 class validate_input_scaling(_Flag):
     r"""Flag for validating input normalization/standardization.
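For context, `debug` followed the same `_Flag` pattern still used by `propagate_grads` and `validate_input_scaling`: a class-level boolean that can also be used as a context manager. A simplified sketch of that pattern, inferred from usage in this diff (not the actual `botorch.settings._Flag` source):

class _Flag:
    """Simplified sketch of a global flag usable as a context manager."""

    _state: bool = False

    @classmethod
    def on(cls) -> bool:
        return cls._state

    @classmethod
    def off(cls) -> bool:
        return not cls._state

    @classmethod
    def _set_state(cls, state: bool) -> None:
        cls._state = state

    def __init__(self, state: bool = True) -> None:
        self.prev = self.__class__.on()
        self.state = state

    def __enter__(self) -> None:
        self.__class__._set_state(self.state)

    def __exit__(self, *args) -> None:
        self.__class__._set_state(self.prev)

This is why tests could write `with settings.debug(True):` — entering the context flipped the class-level state and exiting restored it.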
2 changes: 0 additions & 2 deletions botorch/utils/testing.py
@@ -16,7 +16,6 @@
 from unittest import mock, TestCase
 
 import torch
-from botorch import settings
 from botorch.acquisition.objective import PosteriorTransform
 from botorch.exceptions.warnings import BotorchTensorDimensionWarning, InputDataWarning
 from botorch.models.model import FantasizeMixin, Model
@@ -47,7 +46,6 @@ class BotorchTestCase(TestCase):
 
     def setUp(self, suppress_input_warnings: bool = True) -> None:
         warnings.resetwarnings()
-        settings.debug._set_state(False)
         warnings.simplefilter("always", append=True)
         if suppress_input_warnings:
             warnings.filterwarnings(
18 changes: 8 additions & 10 deletions test/acquisition/multi_objective/test_monte_carlo.py
@@ -13,7 +13,6 @@
 from warnings import catch_warnings, simplefilter
 
 import torch
-from botorch import settings
 from botorch.acquisition import AcquisitionFunction
 from botorch.acquisition.cached_cholesky import _get_cache_root_not_supported_message
 from botorch.acquisition.multi_objective.base import MultiObjectiveMCAcquisitionFunction
@@ -249,10 +248,10 @@ def _test_q_expected_hypervolume_improvement(
         acqf.model._posterior._samples = torch.zeros(1, 2, 2, **tkwargs)
         res = evaluate(acqf, X)
         X2 = torch.zeros(1, 1, 1, requires_grad=True, **tkwargs)
-        with warnings.catch_warnings(record=True) as ws, settings.debug(True):
+        with warnings.catch_warnings(record=True) as ws:
             acqf.set_X_pending(X2)
-            self.assertEqual(acqf.X_pending, X2)
-            self.assertEqual(sum(issubclass(w.category, BotorchWarning) for w in ws), 1)
+        self.assertEqual(acqf.X_pending, X2)
+        self.assertEqual(sum(issubclass(w.category, BotorchWarning) for w in ws), 1)
 
         # test objective
         acqf = acqf_class(
@@ -1127,12 +1126,12 @@ def _test_qnehvi_with_CBD(
         # test set X_pending with grad
         # Get posterior samples to agree with X_pending
         mm._posterior._samples = torch.zeros(1, 7, m, **tkwargs)
-        with warnings.catch_warnings(record=True) as ws, settings.debug(True):
+        with warnings.catch_warnings(record=True) as ws:
             acqf.set_X_pending(
                 torch.cat([X_pending2, X_pending2], dim=0).requires_grad_(True)
             )
-            self.assertIsNone(acqf.X_pending)
-            self.assertEqual(sum(issubclass(w.category, BotorchWarning) for w in ws), 1)
+        self.assertIsNone(acqf.X_pending)
+        self.assertEqual(sum(issubclass(w.category, BotorchWarning) for w in ws), 1)
 
         # test max iep
         mm._posterior._samples = baseline_samples
@@ -1704,9 +1703,8 @@ def _test_cache_root(self, acqf_class: type[AcquisitionFunction]):
         # test we fall back to standard sampling for
         # ill-conditioned covariances
         acqf._baseline_L = torch.zeros_like(acqf._baseline_L)
-        with warnings.catch_warnings(record=True) as ws, settings.debug(True):
-            with torch.no_grad():
-                evaluate(acqf, test_X)
+        with warnings.catch_warnings(record=True) as ws, torch.no_grad():
+            evaluate(acqf, test_X)
         self.assertEqual(
             sum(issubclass(w.category, BotorchWarning) for w in ws), 1
         )
9 changes: 4 additions & 5 deletions test/acquisition/multi_objective/test_multi_fidelity.py
@@ -8,7 +8,6 @@
 from unittest import mock
 
 import torch
-from botorch import settings
 from botorch.acquisition.multi_objective.multi_fidelity import MOMF
 from botorch.acquisition.multi_objective.objective import IdentityMCMultiOutputObjective
 from botorch.exceptions.errors import BotorchError
@@ -149,11 +148,11 @@ def test_momf(self):
         acqf.model._posterior._samples = torch.zeros(1, 2, 2, **tkwargs)
         res = acqf(X)
         X2 = torch.zeros(1, 1, 1, requires_grad=True, **tkwargs)
-        with warnings.catch_warnings(record=True) as ws, settings.debug(True):
+        with warnings.catch_warnings(record=True) as ws:
             acqf.set_X_pending(X2)
-            self.assertEqual(acqf.X_pending, X2)
-            self.assertEqual(len(ws), 1)
-            self.assertTrue(issubclass(ws[-1].category, BotorchWarning))
+        self.assertEqual(acqf.X_pending, X2)
+        self.assertEqual(len(ws), 1)
+        self.assertTrue(issubclass(ws[-1].category, BotorchWarning))
 
         # test objective
         acqf = MOMF(
test/acquisition/multi_objective/test_multi_output_risk_measures.py
@@ -7,7 +7,6 @@
 import warnings
 
 import torch
-from botorch import settings
 from botorch.acquisition.multi_objective.multi_output_risk_measures import (
     IndependentCVaR,
     IndependentVaR,
@@ -512,7 +511,7 @@ def test_set_baseline_Y(self):
         # With Y_samples.
         mars._baseline_Y = None
         Y_samples = model.posterior(X_baseline).mean
-        with warnings.catch_warnings(record=True) as ws, settings.debug(True):
+        with warnings.catch_warnings(record=True) as ws:
             mars.set_baseline_Y(model=model, X_baseline=X_baseline, Y_samples=Y_samples)
         self.assertTrue(torch.equal(mars.baseline_Y, torch.tensor([[1.5, 1.5]])))
         self.assertTrue(any(w.category == BotorchWarning for w in ws))
28 changes: 13 additions & 15 deletions test/acquisition/test_cached_cholesky.py
@@ -8,7 +8,6 @@
 from unittest import mock
 
 import torch
-from botorch import settings
 from botorch.acquisition.cached_cholesky import CachedCholeskyMCSamplerMixin
 from botorch.acquisition.monte_carlo import MCAcquisitionFunction
 from botorch.acquisition.objective import GenericMCObjective, MCAcquisitionObjective
@@ -191,20 +190,19 @@ def test_get_f_X_samples(self):
             with mock.patch(
                 "botorch.acquisition.cached_cholesky.sample_cached_cholesky",
                 side_effect=error_cls,
-            ) as mock_sample_cached_cholesky:
-                with warnings.catch_warnings(record=True) as ws, settings.debug(
-                    True
-                ):
-                    samples = acqf._get_f_X_samples(posterior=posterior, q_in=q)
-                    mock_sample_cached_cholesky.assert_called_once_with(
-                        posterior=posterior,
-                        baseline_L=acqf._baseline_L,
-                        q=q,
-                        base_samples=base_samples,
-                        sample_shape=acqf.sampler.sample_shape,
-                    )
-                    self.assertTrue(issubclass(ws[0].category, BotorchWarning))
-                    self.assertTrue(samples.shape, torch.Size([1, q, 1]))
+            ) as mock_sample_cached_cholesky, warnings.catch_warnings(
+                record=True
+            ) as ws:
+                samples = acqf._get_f_X_samples(posterior=posterior, q_in=q)
+                mock_sample_cached_cholesky.assert_called_once_with(
+                    posterior=posterior,
+                    baseline_L=acqf._baseline_L,
+                    q=q,
+                    base_samples=base_samples,
+                    sample_shape=acqf.sampler.sample_shape,
+                )
+                self.assertTrue(issubclass(ws[0].category, BotorchWarning))
+                self.assertTrue(samples.shape, torch.Size([1, q, 1]))
         # test HOGP
         hogp = HigherOrderGP(torch.zeros(2, 1), torch.zeros(2, 1, 1)).eval()
         acqf = DummyCachedCholeskyAcqf(
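Beyond dropping `settings.debug(True)`, the hunk above also folds the nested `with` blocks into a single statement, which is semantically equivalent to nesting. A generic illustration:

import warnings
from unittest import mock

# Comma-separated context managers in one `with` behave the same as
# nesting them one inside the other, entered left to right.
with mock.patch("time.time", return_value=0.0), warnings.catch_warnings(
    record=True
) as ws:
    warnings.simplefilter("always")
    warnings.warn("example")

assert len(ws) == 1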
9 changes: 4 additions & 5 deletions test/acquisition/test_cost_aware.py
@@ -8,7 +8,6 @@
 import warnings
 
 import torch
-from botorch import settings
 from botorch.acquisition.cost_aware import (
     CostAwareUtility,
     GenericCostAwareUtility,
@@ -63,11 +62,11 @@ def test_InverseCostWeightedUtility(self):
         # check warning for negative cost
         mm = MockModel(MockPosterior(mean=mean.clamp_max(-1e-6)))
         icwu = InverseCostWeightedUtility(mm)
-        with warnings.catch_warnings(record=True) as ws, settings.debug(True):
+        with warnings.catch_warnings(record=True) as ws:
             icwu(X, deltas)
-            self.assertTrue(
-                any(issubclass(w.category, CostAwareWarning) for w in ws)
-            )
+        self.assertTrue(
+            any(issubclass(w.category, CostAwareWarning) for w in ws)
+        )
 
         # basic test for both positive and negative delta values
         mm = MockModel(MockPosterior(mean=mean))
7 changes: 3 additions & 4 deletions test/acquisition/test_decoupled.py
@@ -7,7 +7,6 @@
 import warnings
 
 import torch
-from botorch import settings
 from botorch.acquisition.decoupled import DecoupledAcquisitionFunction
 from botorch.exceptions import BotorchTensorDimensionError, BotorchWarning
 from botorch.logging import shape_to_str
@@ -74,10 +73,10 @@ def test_decoupled_acquisition_function(self):
         af.set_X_pending(X_pending=X_pending)
         af.X_evaluation_mask = None
         X_pending = X_pending.requires_grad_(True)
-        with warnings.catch_warnings(record=True) as ws, settings.debug(True):
+        with warnings.catch_warnings(record=True) as ws:
             af.set_X_pending(X_pending)
-            self.assertEqual(af.X_pending, X_pending)
-            self.assertEqual(sum(issubclass(w.category, BotorchWarning) for w in ws), 1)
+        self.assertEqual(af.X_pending, X_pending)
+        self.assertEqual(sum(issubclass(w.category, BotorchWarning) for w in ws), 1)
         self.assertIsNone(af.X_evaluation_mask)
 
         # test setting X_pending with X_pending_evaluation_mask
22 changes: 8 additions & 14 deletions test/acquisition/test_logei.py
@@ -11,7 +11,6 @@
 from unittest import mock
 
 import torch
-from botorch import settings
 from botorch.acquisition import (
     AcquisitionFunction,
     LogImprovementMCAcquisitionFunction,
@@ -204,12 +203,10 @@ def test_q_log_expected_improvement(self):
             mm._posterior._samples = torch.zeros(1, 2, 1, **tkwargs)
             res = acqf(X)
             X2 = torch.zeros(1, 1, 1, **tkwargs, requires_grad=True)
-            with warnings.catch_warnings(record=True) as ws, settings.debug(True):
+            with warnings.catch_warnings(record=True) as ws:
                 acqf.set_X_pending(X2)
-                self.assertEqual(acqf.X_pending, X2)
-                self.assertEqual(
-                    sum(issubclass(w.category, BotorchWarning) for w in ws), 1
-                )
+            self.assertEqual(acqf.X_pending, X2)
+            self.assertEqual(sum(issubclass(w.category, BotorchWarning) for w in ws), 1)
 
             # testing with illegal taus
             with self.assertRaisesRegex(ValueError, "tau_max is not a scalar:"):
@@ -419,12 +416,10 @@ def test_q_log_noisy_expected_improvement(self):
             X2 = torch.zeros(
                 1, 1, 1, device=self.device, dtype=dtype, requires_grad=True
             )
-            with warnings.catch_warnings(record=True) as ws, settings.debug(True):
+            with warnings.catch_warnings(record=True) as ws:
                 log_acqf.set_X_pending(X2)
-                self.assertEqual(log_acqf.X_pending, X2)
-                self.assertEqual(
-                    sum(issubclass(w.category, BotorchWarning) for w in ws), 1
-                )
+            self.assertEqual(log_acqf.X_pending, X2)
+            self.assertEqual(sum(issubclass(w.category, BotorchWarning) for w in ws), 1)
 
     def test_q_noisy_expected_improvement_batch(self):
         for dtype in (torch.float, torch.double):
@@ -670,9 +665,8 @@ def test_cache_root(self):
             # test we fall back to standard sampling for
             # ill-conditioned covariances
            acqf._baseline_L = torch.zeros_like(acqf._baseline_L)
-            with warnings.catch_warnings(record=True) as ws, settings.debug(True):
-                with torch.no_grad():
-                    acqf(test_X)
+            with warnings.catch_warnings(record=True) as ws, torch.no_grad():
+                acqf(test_X)
             self.assertEqual(sum(issubclass(w.category, BotorchWarning) for w in ws), 1)
 
             # test w/ posterior transform
(diffs for the remaining changed files are not shown)
