
Commit

Review comments
Uri Granta committed Aug 22, 2024
1 parent c48a40a commit 31fb555
Showing 3 changed files with 31 additions and 32 deletions.
tests/integration/test_mixed_space_bayesian_optimization.py (29 additions, 30 deletions)
@@ -13,6 +13,7 @@
 # limitations under the License.
 from __future__ import annotations
 
+import dataclasses
 from typing import cast
 
 import numpy as np
@@ -47,6 +48,7 @@
     Box,
     CategoricalSearchSpace,
     DiscreteSearchSpace,
+    EncoderFunction,
     TaggedProductSearchSpace,
     one_hot_encoder,
 )
@@ -167,15 +169,32 @@ def test_optimizer_finds_minima_of_the_scaled_branin_function(
         TensorType, TaggedProductSearchSpace, TrainableProbabilisticModel
     ],
 ) -> None:
-    initial_query_points = mixed_search_space.sample(5)
-    observer = mk_observer(ScaledBranin.objective)
+    mixed_branin = cast(SingleObjectiveTestProblem[TaggedProductSearchSpace], ScaledBranin)
+    _test_optimizer_finds_problem_minima(
+        dataclasses.replace(mixed_branin, search_space=mixed_search_space),
+        num_steps,
+        acquisition_rule,
+    )
+
+
+def _test_optimizer_finds_problem_minima(
+    problem: SingleObjectiveTestProblem[TaggedProductSearchSpace],
+    num_steps: int,
+    acquisition_rule: AcquisitionRule[
+        TensorType, TaggedProductSearchSpace, TrainableProbabilisticModel
+    ],
+    encoder: EncoderFunction | None = None,
+) -> None:
+    initial_query_points = problem.search_space.sample(5)
+    observer = mk_observer(problem.objective)
     initial_data = observer(initial_query_points)
     model = GaussianProcessRegression(
-        build_gpr(initial_data, mixed_search_space, likelihood_variance=1e-8)
+        build_gpr(initial_data, problem.search_space, likelihood_variance=1e-8),
+        encoder=encoder,
     )
 
     dataset = (
-        BayesianOptimizer(observer, mixed_search_space)
+        BayesianOptimizer(observer, problem.search_space)
         .optimize(num_steps, initial_data, model, acquisition_rule)
         .try_get_final_dataset()
     )
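
For readers unfamiliar with it, the `dataclasses.replace` call above builds a copy of the benchmark problem dataclass with only its search space swapped out, so the existing `ScaledBranin` definition can be reused unchanged. A minimal standalone sketch of that mechanism, using a hypothetical `Problem` dataclass rather than trieste's `SingleObjectiveTestProblem`:

import dataclasses


@dataclasses.dataclass(frozen=True)
class Problem:  # hypothetical stand-in for SingleObjectiveTestProblem
    name: str
    search_space: tuple


base = Problem(name="scaled_branin", search_space=(0.0, 1.0))
variant = dataclasses.replace(base, search_space=(-1.0, 1.0))  # copy with one field overridden

assert base.search_space == (0.0, 1.0)      # the original problem is untouched
assert variant.search_space == (-1.0, 1.0)  # only the overridden field changes
assert variant.name == base.name            # every other field is copied as-is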
@@ -185,7 +204,7 @@ def test_optimizer_finds_minima_of_the_scaled_branin_function(
     best_y = dataset.observations[arg_min_idx]
     best_x = dataset.query_points[arg_min_idx]
 
-    relative_minimizer_err = tf.abs((best_x - ScaledBranin.minimizers) / ScaledBranin.minimizers)
+    relative_minimizer_err = tf.abs((best_x - problem.minimizers) / problem.minimizers)
     # these accuracies are the current best for the given number of optimization steps, which makes
     # this is a regression test
     assert tf.reduce_any(tf.reduce_all(relative_minimizer_err < 0.1, axis=-1), axis=0)
@@ -286,30 +305,10 @@ def test_optimizer_finds_minima_of_the_categorical_scaled_branin_function(
         TensorType, TaggedProductSearchSpace, TrainableProbabilisticModel
     ],
 ) -> None:
-    initial_query_points = cat_problem.search_space.sample(5)
-    observer = mk_observer(cat_problem.objective)
-    initial_data = observer(initial_query_points)
-
-    # model uses one-hot encoding for the categorical inputs
-    encoder = one_hot_encoder(cat_problem.search_space)
-    model = GaussianProcessRegression(
-        build_gpr(initial_data, cat_problem.search_space, likelihood_variance=1e-8),
-        encoder=encoder,
-    )
-
-    dataset = (
-        BayesianOptimizer(observer, cat_problem.search_space)
-        .optimize(num_steps, initial_data, model, acquisition_rule)
-        .try_get_final_dataset()
+    _test_optimizer_finds_problem_minima(
+        cat_problem,
+        num_steps,
+        acquisition_rule,
+        encoder=one_hot_encoder(cat_problem.search_space),
     )
 
-    arg_min_idx = tf.squeeze(tf.argmin(dataset.observations, axis=0))
-
-    best_y = dataset.observations[arg_min_idx]
-    best_x = dataset.query_points[arg_min_idx]
-
-    relative_minimizer_err = tf.abs((best_x - cat_problem.minimizers) / cat_problem.minimizers)
-    assert tf.reduce_any(
-        tf.reduce_all(relative_minimizer_err < 0.1, axis=-1), axis=0
-    ), relative_minimizer_err
-    npt.assert_allclose(best_y, cat_problem.minimum, rtol=0.005)
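
The categorical variant now just forwards `encoder=one_hot_encoder(cat_problem.search_space)` to the shared helper, which hands it to `GaussianProcessRegression`. As a rough illustration of the encoder-function idea (a callable mapping raw query points to the representation the model is trained on), here is a toy encoder; it is an assumed sketch for illustration only, not trieste's `one_hot_encoder`:

import tensorflow as tf


def toy_one_hot_encoder(num_categories: int):
    """Toy EncoderFunction-style callable: one-hot encode the final (categorical) column."""

    def encode(query_points: tf.Tensor) -> tf.Tensor:
        # assume the last column holds an integer category index, the rest is continuous
        continuous, category = query_points[..., :-1], query_points[..., -1]
        one_hot = tf.one_hot(tf.cast(category, tf.int32), num_categories, dtype=query_points.dtype)
        return tf.concat([continuous, one_hot], axis=-1)

    return encode


encoder = toy_one_hot_encoder(num_categories=3)
x = tf.constant([[0.2, 1.0], [0.7, 2.0]])  # last column is the category index
print(encoder(x).numpy())  # [[0.2, 0., 1., 0.], [0.7, 0., 0., 1.]]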
tests/unit/acquisition/test_rule.py (1 addition, 1 deletion)
@@ -2337,7 +2337,7 @@ def test_trust_region_discrete_update_size(
 
     # Check the new set of neighbors.
     if categorical:
-        # Hammond distance
+        # Hamming distance
         neighbors_mask = tf.where(search_space.points != tr.location, 1, 0)
         neighbors_mask = tf.reduce_sum(neighbors_mask, axis=-1) <= tr.eps
     else:
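
The comment now correctly names the Hamming distance: the number of dimensions in which two categorical points differ. A small toy sketch of the masking logic in the lines above, with made-up `points`, `location`, and `eps`:

import tensorflow as tf

points = tf.constant([[0, 1, 2], [0, 1, 0], [1, 0, 2]])  # candidate categorical points, shape [N, D]
location = tf.constant([0, 1, 2])                        # trust-region centre
eps = 1                                                  # neighbourhood radius, counted in mismatches

mismatches = tf.where(points != location, 1, 0)          # 1 wherever a category differs from the centre
hamming = tf.reduce_sum(mismatches, axis=-1)             # per-point Hamming distances: [0, 1, 2]
neighbors_mask = hamming <= eps                          # [True, True, False]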
trieste/acquisition/rule.py (1 addition, 1 deletion)
@@ -2280,7 +2280,7 @@ def _compute_global_distances(self) -> TensorType:
             return tf.math.reduce_sum(
                 tf.where(tf.expand_dims(points, -2) == tf.expand_dims(points, -3), 0, 1),
                 axis=-1,
-                keepdims=True,  # (keep last dim for distance calculation below)
+                keepdims=True,  # (keep last dim for reduce_all distance calculation below)
             )  # [num_points, num_points, 1]
         else:
             return tf.abs(
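
The revised comment spells out why `keepdims=True` is used: the trailing singleton dimension is kept so that a later `reduce_all` can compare the pairwise distances against a tolerance and collapse that dimension. A minimal sketch of the shape bookkeeping (the `eps` comparison below is an assumed illustration, not the rule's actual downstream code):

import tensorflow as tf

points = tf.constant([[0, 1], [0, 0], [1, 1]])  # [num_points, D] categorical codes

distances = tf.math.reduce_sum(
    tf.where(tf.expand_dims(points, -2) == tf.expand_dims(points, -3), 0, 1),
    axis=-1,
    keepdims=True,  # keep the last dim -> shape [num_points, num_points, 1]
)

eps = tf.constant([1])                                # assumed tolerance, broadcast over the kept dim
in_region = tf.reduce_all(distances <= eps, axis=-1)  # [num_points, num_points] boolean mask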
