Add mocks for optimize_acqf_mixed_alternating in fast_optimize_context_manager (#2598)

Summary:
Pull Request resolved: #2598

This mocks the default number of iterations for `optimize_acqf_mixed_alternating` down to 1, which should speed up testing considerably when combined with the existing mocks for `scipy_minimize`.

Differential Revision: D65146455
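
For illustration only (not part of the commit): a minimal sketch of how the patched constants take effect, wrapping a call to `optimize_acqf_mixed_alternating` in `fast_optimize_context_manager()` so the alternating optimization performs a single discrete and a single continuous step per restart. `SinAcqf` is a hypothetical toy acquisition function introduced here for the sketch; the argument values mirror the new test below.

import torch

from botorch.optim.optimize_mixed import optimize_acqf_mixed_alternating
from botorch.test_utils.mock import fast_optimize_context_manager
from botorch.utils.testing import MockAcquisitionFunction


class SinAcqf(MockAcquisitionFunction):
    # Hypothetical toy acquisition function; differentiable in the continuous
    # dimension so the mocked scipy_minimize can still take one gradient step.
    def __call__(self, X):
        # X has shape batch x q x d; return one value per batch element.
        return torch.sin(X[..., 0]).sum(dim=-1)


with fast_optimize_context_manager():
    # Inside this context, MAX_ITER_ALTER, MAX_ITER_DISCRETE, and MAX_ITER_CONT
    # are patched to 1, keeping the optimization cheap enough for unit tests.
    candidate, value = optimize_acqf_mixed_alternating(
        acq_function=SinAcqf(),
        bounds=torch.tensor([[-2.0, 0.0], [2.0, 200.0]]),
        discrete_dims=[1],
        num_restarts=1,
    )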
saitcakmak authored and facebook-github-bot committed Oct 29, 2024
1 parent bd1e103 commit 77d52db
Showing 2 changed files with 40 additions and 3 deletions.
9 changes: 8 additions & 1 deletion botorch/test_utils/mock.py
@@ -21,7 +21,6 @@
gen_batch_initial_conditions,
gen_one_shot_kg_initial_conditions,
)

from botorch.optim.utils.timeout import minimize_with_timeout
from scipy.optimize import OptimizeResult
from torch import Tensor
@@ -96,6 +95,14 @@ def minimal_gen_os_ics(*args: Any, **kwargs: Any) -> Tensor | None:
)
)

# Reduce default number of iterations in `optimize_acqf_mixed_alternating`.
for name in [
"MAX_ITER_ALTER",
"MAX_ITER_DISCRETE",
"MAX_ITER_CONT",
]:
es.enter_context(mock.patch(f"botorch.optim.optimize_mixed.{name}", new=1))

yield

if (not force) and all(
34 changes: 32 additions & 2 deletions test/test_utils/test_mock.py
@@ -16,7 +16,12 @@
from botorch.optim.core import scipy_minimize
from botorch.optim.initializers import gen_batch_initial_conditions, initialize_q_batch
from botorch.optim.optimize import optimize_acqf

from botorch.optim.optimize_mixed import (
continuous_step,
discrete_step,
get_nearest_neighbors,
optimize_acqf_mixed_alternating,
)
from botorch.test_utils.mock import fast_optimize, fast_optimize_context_manager
from botorch.utils.testing import BotorchTestCase, MockAcquisitionFunction

@@ -32,7 +37,7 @@ def __call__(self, X):


class TestMock(BotorchTestCase):
def test_fast_optimize_context_manager(self):
def test_fast_optimize_context_manager(self) -> None:
with self.subTest("gen_candidates_scipy"):
with fast_optimize_context_manager():
cand, value = gen_candidates_scipy(
@@ -80,6 +85,31 @@ def closure():
)
self.assertEqual(mock_init_q_batch.call_args[1]["n"], 2)

def test_fast_optimize_mixed_alternating(self) -> None:
with patch(
"botorch.optim.optimize_mixed.discrete_step",
wraps=discrete_step,
) as mock_discrete, patch(
"botorch.optim.optimize_mixed.continuous_step",
wraps=continuous_step,
) as mock_continuous, patch(
"botorch.optim.optimize_mixed.get_nearest_neighbors",
wraps=get_nearest_neighbors,
) as mock_neighbors:
optimize_acqf_mixed_alternating(
acq_function=SinAcqusitionFunction(),
bounds=torch.tensor([[-2.0, 0.0], [2.0, 200.0]]),
discrete_dims=[1],
num_restarts=1,
)
# These should be called at most `MAX_ITER_ALTER` times for each random
# restart, which is mocked to 1.
mock_discrete.assert_called_once()
mock_continuous.assert_called_once()
# This should be called at most `MAX_ITER_DISCRETE` times in each call of
# `mock_discrete`, which should total to 1.
mock_neighbors.assert_called_once()

@fast_optimize
def test_decorator(self) -> None:
model = SingleTaskGP(
