Add a constraint layer (experimental) (#1053)
jrapin authored Feb 26, 2021
1 parent b59315d commit 124f8cb
Showing 10 changed files with 194 additions and 17 deletions.
4 changes: 4 additions & 0 deletions nevergrad/ops/__init__.py
@@ -0,0 +1,4 @@
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
94 changes: 94 additions & 0 deletions nevergrad/ops/constraints.py
@@ -0,0 +1,94 @@
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import numpy as np
import nevergrad.common.typing as tp
from nevergrad.common import errors
from nevergrad.parametrization import core
from nevergrad import optimizers
from nevergrad import callbacks


class Constraint(core.Operator):
    """Operator for applying a constraint on a Parameter

    Parameters
    ----------
    func: function
        the constraint function, taking the same arguments as the function to optimize.
        This constraint function must return a float (or a list/tuple/array of floats),
        positive if the constraint is not satisfied, zero or negative otherwise.
    optimizer: str
        name of the optimizer to use for solving the constraint
    budget: int
        the budget to use for applying the constraint

    Example
    -------
    >>> constrained_parameter = Constraint(constraint_function)(parameter)
    >>> constrained_parameter.value  # value after trying to satisfy the constraint
    """

    _LAYER_LEVEL = core.Level.CONSTRAINT

    def __init__(self, func: tp.Callable[..., tp.Loss], optimizer: str = "NGOpt", budget: int = 100) -> None:
        super().__init__()
        self._func = func
        self._opt_cls = optimizers.registry[optimizer]
        self._budget = budget
        self._cache: tp.Any = None

    def _layered_del_value(self) -> None:
        self._cache = None  # clear cache!

    def apply_constraint(self, parameter: core.Parameter) -> core.Parameter:
        """Find a new parameter that better satisfies the constraint"""
        # This function can be overridden
        optim = self._opt_cls(parameter, budget=self._budget)
        early_stopping = callbacks.EarlyStopping(self.stopping_criterion)
        optim.register_callback("ask", early_stopping)
        optim.minimize(self.function)
        return optim.pareto_front()[0]

    def function(self, *args: tp.Any, **kwargs: tp.Any) -> tp.Loss:
        out = self._func(*args, **kwargs)
        if isinstance(out, (bool, np.bool_)):
            raise errors.NevergradTypeError(
                "Constraint must return a positive float when unsatisfied (not a bool)"
            )
        return np.maximum(0, out)  # type: ignore

    def parameter(self) -> core.Parameter:
        """Returns a constraint-free parameter, for use in the optimization process"""
        param = self._layers[0].copy()
        # remove the last layer, and check that it was indeed this constraint layer
        if self._index != param._layers.pop()._index:
            raise RuntimeError("Constraint layer should be unique and placed last")
        return param  # type: ignore

    def stopping_criterion(self, optimizer: tp.Any) -> bool:
        """Checks whether a solution was found.
        This is used as a stopping criterion callback.
        """
        if optimizer.num_tell < 1:
            return False
        best = optimizer.pareto_front()[0]
        return not np.any(best.losses > 0)

    def _layered_get_value(self) -> tp.Any:
        # TODO: this can be made more efficient (fewer copies) if need be.
        # Override only apply_constraint if you can; tampering with this method is tricky.
        if self._cache is not None:
            return self._cache
        parameter = self.parameter()
        satisfied = not np.any(self.function(*parameter.args, **parameter.kwargs))
        if satisfied:
            self._cache = parameter.value
            return self._cache
        root: core.Parameter = self._layers[0]  # type: ignore
        recom = self.apply_constraint(parameter)
        root.set_standardized_data(np.zeros(root.dimension), reference=recom)
        self._cache = recom.value
        return self._cache
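
For reference, a minimal usage sketch of the new layer, distilled from the docstring and the tests below (the constraint function and parameter here are illustrative; the constraint is solved lazily, on first value access):

import nevergrad as ng
from nevergrad.ops.constraints import Constraint

# convention of this layer: positive output = constraint violated,
# zero or negative = satisfied
def at_least_one(value: float) -> float:
    return 1.0 - value  # > 0 while value < 1

param = ng.p.Scalar(init=0.0)
constrained = Constraint(at_least_one)(param)  # copy of param with the layer appended
print(constrained.value)  # first access runs the inner optimizer, moving the value toward >= 1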
27 changes: 27 additions & 0 deletions nevergrad/ops/test_constraints.py
@@ -0,0 +1,27 @@
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import typing as tp
import pytest
import numpy as np
import nevergrad as ng
from . import constraints


def function(*args: float) -> tp.Any:
    if len(args) == 1:
        return args[0]
    return args


@pytest.mark.parametrize("num", (1, 3))  # type: ignore
def test_constraint(num: int) -> None:
    parameter = ng.p.Instrumentation(*(ng.p.Scalar(np.random.randn()) for _ in range(num)))
    constrained = constraints.Constraint(function)(parameter)
    # check basic layer functionalities
    layer: constraints.Constraint = constrained._layers[-1]  # type: ignore
    np.testing.assert_array_equal(layer.function(*([1] * num)), 1 if num == 1 else [1] * num)
    np.testing.assert_array_equal(layer.function(*([-1] * num)), 0 if num == 1 else [0] * num)
    assert sum(x < 0.1 for x in constrained.args) == num, constrained.args  # some slack to avoid flakiness
2 changes: 2 additions & 0 deletions nevergrad/optimization/base.py
@@ -467,6 +467,8 @@ def ask(self) -> p.Parameter:
         assert (
             candidate is not None
         ), f"{self.__class__.__name__}._internal_ask method returned None instead of a point."
+        # make sure to call the value getter, which may update the value, before we freeze the parameter
+        candidate.value  # pylint: disable=pointless-statement
         candidate.freeze()  # make sure it is not modified somewhere
         return candidate
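
The reordering matters because a Constraint layer repairs an unsatisfied candidate from inside the value getter (via set_standardized_data, see _layered_get_value above), which the frozen check would forbid. A hypothetical sketch of the failure the new order avoids:

import nevergrad as ng
from nevergrad.ops.constraints import Constraint

candidate = Constraint(lambda x: 1.0 - x)(ng.p.Scalar(init=0.0))
candidate.freeze()
# the constraint is unsatisfied, so the getter tries to mutate the (frozen) root parameter:
candidate.value  # raises RuntimeError: Cannot modify frozen Parameter ...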

18 changes: 10 additions & 8 deletions nevergrad/optimization/test_optimizerlib.py
@@ -518,17 +518,19 @@ def _ellips(x: np.ndarray) -> float:
     assert _target(default_recom) > 7.0 * _target(metamodel_recom)


-@pytest.mark.parametrize(
-    "penalization,expected",
+@pytest.mark.parametrize(  # type: ignore
+    "penalization,expected,as_layer",
     [
-        (False, [1.005573e00, 3.965783e-04]),
-        (True, [0.999987, -0.322118]),
+        (False, [1.005573e00, 3.965783e-04], False),
+        (True, [0.999987, -0.322118], False),
+        (False, [1.000760, -5.116619e-4], True),
     ],
 )
 @testing.suppress_nevergrad_warnings()  # hides failed constraints
-def test_constrained_optimization(penalization: bool, expected: tp.List[float]) -> None:
+def test_constrained_optimization(penalization: bool, expected: tp.List[float], as_layer: bool) -> None:
     def constraint(i: tp.Any) -> tp.Union[bool, float]:
-        return i[1]["x"][0] >= 1
+        out = i[1]["x"][0] >= 1
+        return out if not as_layer else float(not out)

     parametrization = ng.p.Instrumentation(x=ng.p.Array(shape=(1,)), y=ng.p.Scalar())
     optimizer = optlib.OnePlusOne(parametrization, budget=100)
@@ -541,8 +543,8 @@ def constraint(i: tp.Any) -> tp.Union[bool, float]:  # pylint: disable=function-

     with warnings.catch_warnings():
         warnings.filterwarnings("ignore", category=UserWarning)
-        optimizer.parametrization.register_cheap_constraint(constraint)
-        recom = optimizer.minimize(_square)
+        optimizer.parametrization.register_cheap_constraint(constraint, as_layer=as_layer)
+        recom = optimizer.minimize(_square, verbosity=2)
     np.testing.assert_array_almost_equal([recom.kwargs["x"][0], recom.kwargs["y"]], expected)


5 changes: 3 additions & 2 deletions nevergrad/parametrization/_layering.py
@@ -22,8 +22,9 @@ class Level(Enum):
     OPERATION = 10

     # final
-    ARRAY_CASTING = 900
-    INTEGER_CASTING = 1000  # must be the last layer
+    ARRAY_CASTING = 800
+    INTEGER_CASTING = 900
+    CONSTRAINT = 1000  # must be the last layer


class Layered:
8 changes: 6 additions & 2 deletions nevergrad/parametrization/container.py
@@ -42,6 +42,10 @@ def __init__(self, **parameters: tp.Any) -> None:
             str, str
         ] = {}  # hacky undocumented way to bypass boring representations

+    @property
+    def dimension(self) -> int:
+        return sum(x.dimension for x in self._content.values())
+
     def _sanity_check(self, parameters: tp.List[core.Parameter]) -> None:
         """Check that all parameters are different"""
         # TODO: this is first order, in practice we would need to test all the different
@@ -230,10 +234,10 @@ def __init__(self, *args: tp.Any, **kwargs: tp.Any) -> None:

     @property
     def args(self) -> tp.Tuple[tp.Any, ...]:
-        return self[0].value  # type: ignore
+        return self.value[0]  # type: ignore

     @property
     def kwargs(self) -> tp.Dict[str, tp.Any]:
-        return self[1].value  # type: ignore
+        return self.value[1]  # type: ignore

     value: core.ValueProperty[tp.ArgsKwargs] = core.ValueProperty()  # type: ignore
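
Routing args and kwargs through self.value (rather than reading the child parameters directly) is what lets top-level layers such as Constraint intercept the access. A hedged sketch of the difference, with an illustrative constraint function:

import nevergrad as ng
from nevergrad.ops.constraints import Constraint

inst = ng.p.Instrumentation(ng.p.Scalar(init=0.0))
constrained = Constraint(lambda x: 1.0 - x)(inst)

constrained.args      # goes through .value, so the Constraint layer runs first
constrained[0].value  # reads the child tuple directly, bypassing the layer
                      # (this was the pre-change behavior of the args property)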
37 changes: 32 additions & 5 deletions nevergrad/parametrization/core.py
@@ -11,10 +11,10 @@
 from . import utils
 from ._layering import ValueProperty as ValueProperty
 from ._layering import Layered as Layered
-from ._layering import Level
+from ._layering import Level as Level


-# pylint: disable=no-value-for-parameter,pointless-statement
+# pylint: disable=no-value-for-parameter,pointless-statement,import-outside-toplevel


 P = tp.TypeVar("P", bound="Parameter")
@@ -266,7 +266,9 @@ def satisfies_constraints(self) -> bool:
         return all(utils.float_penalty(func(val)) <= 0 for func in self._constraint_checkers)

     def register_cheap_constraint(
-        self, func: tp.Union[tp.Callable[[tp.Any], bool], tp.Callable[[tp.Any], float]]
+        self,
+        func: tp.Union[tp.Callable[[tp.Any], bool], tp.Callable[[tp.Any], float]],
+        as_layer: bool = False,
     ) -> None:
         """Registers a new constraint on the parameter values.
@@ -281,10 +283,23 @@ def register_cheap_constraint(
         - this is only for checking, after mutation/recombination/etc., whether the value still satisfies the constraints.
           The constraint is not used in those processes.
         - constraints should be fast to compute.
+        - this function has an additional "as_layer" parameter, which is experimental for now and can have unexpected
+          behavior
         """
         if getattr(func, "__name__", "not lambda") == "<lambda>":  # LambdaType does not work :(
             warnings.warn("Lambda as constraint is not advised because it may not be picklable.")
-        self._constraint_checkers.append(func)
+        if not as_layer:
+            self._constraint_checkers.append(func)
+        else:
+            from nevergrad.ops.constraints import Constraint
+            import nevergrad as ng
+
+            compat_func = (
+                func
+                if not isinstance(self, ng.p.Instrumentation)
+                else utils._ConstraintCompatibilityFunction(func)
+            )
+            self.add_layer(Constraint(compat_func))  # type: ignore

    # %% random state

@@ -381,7 +396,7 @@ def _check_frozen(self) -> None:
             self, Constant
         ):  # nevermind constants (since they don't spawn children)
             raise RuntimeError(
-                f"Cannot modify frozen Parameter {self}, please spawn a child and modify it instead "
+                f"Cannot modify frozen Parameter {self.name}, please spawn a child and modify it instead "
                 "(optimizers freeze the parametrization and all asked and told candidates to avoid border effects)"
             )
         self._subobjects.apply("_check_frozen")
@@ -480,3 +495,15 @@ def __init__(self, parameter: tp.Optional[Parameter] = None) -> None:
             f"be used by the optimizer.\n(received {parameter} of type {type(parameter)})"
         )
         super().__init__(parameter)
+
+
+class Operator(Layered):
+    """Layer object that can be used as an operator on a Parameter"""
+
+    _LAYER_LEVEL = Level.OPERATION
+
+    def __call__(self, parameter: Parameter) -> Parameter:
+        """Applies the operator on a Parameter to create a new Parameter"""
+        new = parameter.copy()
+        new.add_layer(self.copy())
+        return new
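
A sketch of the experimental as_layer path, mirroring the updated test_constrained_optimization above; the objective is a stand-in for the test's _square, and note that once routed through the layer the constraint must return a float that is positive when violated:

import nevergrad as ng

param = ng.p.Instrumentation(x=ng.p.Array(shape=(1,)), y=ng.p.Scalar())

def constraint(i):  # receives the (args, kwargs) tuple, like a classic cheap constraint
    return float(not i[1]["x"][0] >= 1)  # 1.0 when violated, 0.0 when satisfied

param.register_cheap_constraint(constraint, as_layer=True)
optimizer = ng.optimizers.OnePlusOne(param, budget=100)
recommendation = optimizer.minimize(lambda x, y: float((x[0] - 0.5) ** 2 + y**2))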
4 changes: 4 additions & 0 deletions nevergrad/parametrization/data.py
@@ -127,6 +127,10 @@ def __init__(
         self._ref_data: tp.Optional[np.ndarray] = None
         self.add_layer(_layering.ArrayCasting())

+    @property
+    def dimension(self) -> int:
+        return int(np.prod(self._value.shape))
+
     def _compute_descriptors(self) -> utils.Descriptors:
         return utils.Descriptors(continuous=not self.integer)
12 changes: 12 additions & 0 deletions nevergrad/parametrization/utils.py
@@ -294,3 +294,15 @@ def float_penalty(x: tp.Union[bool, float]) -> float:
     elif isinstance(x, (float, np.floating)):
         return -min(0, x)  # negative float ==> positive penalty
     raise TypeError(f"Only bools and floats are supported for checking constraints, but got: {x} ({type(x)})")
+
+
+class _ConstraintCompatibilityFunction:
+    """temporary hack for "register_cheap_constraint", to be removed"""
+
+    def __init__(self, func: tp.Callable[[tp.Any], tp.Loss]) -> None:
+        self.func = func
+
+    def __call__(self, *args: tp.Any, **kwargs: tp.Any) -> tp.Loss:
+        out = self.func((args, kwargs))
+        return out
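
For clarity, the wrapper above just re-packs the layer's *args/**kwargs call into the single (args, kwargs) tuple that classic cheap constraints expect. A tiny illustration with a hypothetical constraint function:

from nevergrad.parametrization.utils import _ConstraintCompatibilityFunction

f = _ConstraintCompatibilityFunction(lambda i: i[1]["x"] - 1)
f(3, x=2.0)  # calls the wrapped function with ((3,), {"x": 2.0}) and returns 1.0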
