Skip to content

Commit 99d6ffb

Browse files
dme65 authored and facebook-github-bot committed
Non-linear inequality constraints
Summary: See title. This diff isn't ready for review by any means, but is an attempt to temporarily unblock facebook#769. Differential Revision: D33853834 fbshipit-source-id: 5c00203ff438226010693cb04a2694c0dbb0cea7
1 parent c3c6835 commit 99d6ffb

File tree

2 files changed

+26
-0
lines changed

2 files changed

+26
-0
lines changed

Diff for: ax/models/torch/botorch.py

+1
Original file line numberDiff line numberDiff line change
@@ -84,6 +84,7 @@
8484
Tensor,
8585
Optional[Tuple[Tensor, Tensor]],
8686
Optional[Tuple[Tensor, Tensor]],
87+
Optional[List[Callable]],
8788
Optional[Dict[int, float]],
8889
Optional[TConfig],
8990
Optional[Dict[int, float]],

Diff for: ax/models/torch/botorch_defaults.py

+25
Original file line numberDiff line numberDiff line change
@@ -304,7 +304,9 @@ def scipy_optimizer(
304304
n: int,
305305
inequality_constraints: Optional[List[Tuple[Tensor, Tensor, float]]] = None,
306306
equality_constraints: Optional[List[Tuple[Tensor, Tensor, float]]] = None,
307+
nonlinear_inequality_constraints: Optional[List[Callable]] = None,
307308
fixed_features: Optional[Dict[int, float]] = None,
309+
batch_initial_conditions: Optional[Tensor] = None,
308310
rounding_func: Optional[Callable[[Tensor], Tensor]] = None,
309311
**kwargs: Any,
310312
) -> Tuple[Tensor, Tensor]:
@@ -321,6 +323,14 @@ def scipy_optimizer(
321323
equality constraints: A list of tuples (indices, coefficients, rhs),
322324
with each tuple encoding an equality constraint of the form
323325
`\sum_i (X[indices[i]] * coefficients[i]) == rhs`
326+
nonlinear_inequality_constraints: A list of callables that represent
327+
non-linear inequality constraints of the form `callable(x) >= 0`. Each
328+
callable is expected to take a `(num_restarts) x q x d`-dim tensor as an
329+
input and return a `(num_restarts) x q`-dim tensor with the constraint
330+
values. The constraints will later be passed to SLSQP. You need to pass in
331+
`batch_initial_conditions` in this case.
332+
batch_initial_conditions: A tensor to specify the initial conditions. Set
332+
this if you do not want to use the default initialization strategy.
324334
fixed_features: A map {feature_index: value} for features that should
325335
be fixed to a particular value during generation.
326336
rounding_func: A function that rounds an optimization result
@@ -352,7 +362,9 @@ def scipy_optimizer(
352362
options=kwargs,
353363
inequality_constraints=inequality_constraints,
354364
equality_constraints=equality_constraints,
365+
nonlinear_inequality_constraints=nonlinear_inequality_constraints,
355366
fixed_features=fixed_features,
367+
batch_initial_conditions=batch_initial_conditions,
356368
sequential=sequential,
357369
post_processing_func=rounding_func,
358370
)
@@ -365,6 +377,7 @@ def recommend_best_observed_point(
365377
objective_weights: Tensor,
366378
outcome_constraints: Optional[Tuple[Tensor, Tensor]] = None,
367379
linear_constraints: Optional[Tuple[Tensor, Tensor]] = None,
380+
nonlinear_inequality_constraints: Optional[List[Callable]] = None,
368381
fixed_features: Optional[Dict[int, float]] = None,
369382
model_gen_options: Optional[TConfig] = None,
370383
target_fidelities: Optional[Dict[int, float]] = None,
@@ -385,6 +398,8 @@ def recommend_best_observed_point(
385398
linear_constraints: A tuple of (A, b). For k linear constraints on
386399
d-dimensional x, A is (k x d) and b is (k x 1) such that
387400
A x <= b.
401+
nonlinear_inequality_constraints: A list of callables that represent
402+
non-linear inequality constraints of the form `callable(x) >= 0`.
388403
fixed_features: A map {feature_index: value} for features that
389404
should be fixed to a particular value in the best point.
390405
model_gen_options: A config dictionary that can contain
@@ -396,6 +411,11 @@ def recommend_best_observed_point(
396411
Returns:
397412
A d-array of the best point, or None if no feasible point was observed.
398413
"""
414+
if nonlinear_inequality_constraints:
415+
raise NotImplementedError(
416+
"`nonlinear_inequality_constraints` aren't supported by "
417+
"`recommend_best_observed_point`."
418+
)
399419
if target_fidelities:
400420
raise NotImplementedError(
401421
"target_fidelities not implemented for base BotorchModel"
@@ -421,6 +441,7 @@ def recommend_best_out_of_sample_point(
421441
objective_weights: Tensor,
422442
outcome_constraints: Optional[Tuple[Tensor, Tensor]] = None,
423443
linear_constraints: Optional[Tuple[Tensor, Tensor]] = None,
444+
nonlinear_inequality_constraints: Optional[List[Callable]] = None,
424445
fixed_features: Optional[Dict[int, float]] = None,
425446
model_gen_options: Optional[TConfig] = None,
426447
target_fidelities: Optional[Dict[int, float]] = None,
@@ -442,6 +463,8 @@ def recommend_best_out_of_sample_point(
442463
linear_constraints: A tuple of (A, b). For k linear constraints on
443464
d-dimensional x, A is (k x d) and b is (k x 1) such that
444465
A x <= b.
466+
nonlinear_inequality_constraints: A list of callables that represent
467+
non-linear inequality constraints of the form `callable(x) >= 0`.
445468
fixed_features: A map {feature_index: value} for features that
446469
should be fixed to a particular value in the best point.
447470
model_gen_options: A config dictionary that can contain
@@ -483,6 +506,8 @@ def recommend_best_out_of_sample_point(
483506
# (including transforming constraints b/c of fixed features)
484507
if inequality_constraints is not None:
485508
raise UnsupportedError("Inequality constraints are not supported!")
509+
if nonlinear_inequality_constraints is not None:
510+
raise UnsupportedError("Non-linear inequality constraints are not supported!")
486511

487512
return_best_only = optimizer_options.get("return_best_only", True)
488513
bounds_ = torch.tensor(bounds, dtype=model.dtype, device=model.device)

0 commit comments

Comments
 (0)