From ec2d41a687d4d3636f2f912f2533ae5f3aa0ca93 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Thu, 11 Feb 2021 12:52:05 +0100 Subject: [PATCH 01/39] Restructure around overridable --- nevergrad/parametrization/core.py | 36 +++--------------------- nevergrad/parametrization/utils.py | 45 ++++++++++++++++++++++++++++++ 2 files changed, 49 insertions(+), 32 deletions(-) diff --git a/nevergrad/parametrization/core.py b/nevergrad/parametrization/core.py index dd3cddad7..45212a342 100644 --- a/nevergrad/parametrization/core.py +++ b/nevergrad/parametrization/core.py @@ -10,43 +10,14 @@ import nevergrad.common.typing as tp from nevergrad.common import errors from . import utils - -# pylint: disable=no-value-for-parameter +from .utils import ValueProperty as ValueProperty P = tp.TypeVar("P", bound="Parameter") -X = tp.TypeVar("X") - - -class ValueProperty(tp.Generic[X]): - """Typed property (descriptor) object so that the value attribute of - Parameter objects fetches _get_value and _set_value methods - """ - - # This uses the descriptor protocol, like a property: - # See https://docs.python.org/3/howto/descriptor.html - # - # Basically parameter.value calls parameter.value.__get__ - # and then parameter._get_value - def __init__(self) -> None: - self.__doc__ = """Value of the Parameter, which should be sent to the function - to optimize. 
- - Example - ------- - >>> ng.p.Array(shape=(2,)).value - array([0., 0.]) - """ - - def __get__(self, obj: "Parameter", objtype: tp.Optional[tp.Type[object]] = None) -> X: - return obj._get_value() # type: ignore - - def __set__(self, obj: "Parameter", value: X) -> None: - obj._set_value(value) -# pylint: disable=too-many-instance-attributes,too-many-public-methods -class Parameter: +# pylint: disable=too-many-public-methods +class Parameter(utils.Overridable): """Class providing the core functionality of a parameter, aka value, internal/model parameters, mutation, recombination and additional features such as shared random state, @@ -67,6 +38,7 @@ class Parameter: def __init__(self) -> None: # Main features + super().__init__(applied_on=None) self.uid = uuid.uuid4().hex self._subobjects = utils.Subobjects( self, base=Parameter, attribute="__dict__" diff --git a/nevergrad/parametrization/utils.py b/nevergrad/parametrization/utils.py index 5568f4a9f..3c6d899cf 100644 --- a/nevergrad/parametrization/utils.py +++ b/nevergrad/parametrization/utils.py @@ -12,6 +12,7 @@ import numpy as np from nevergrad.common import typing as tp from nevergrad.common import tools as ngtools +from nevergrad.common import errors class BoundChecker: @@ -294,3 +295,47 @@ def float_penalty(x: tp.Union[bool, float]) -> float: elif isinstance(x, (float, np.float)): return -min(0, x) # Negative ==> >0 raise TypeError(f"Only bools and floats are supported for check constaint, but got: {x} ({type(x)})") + + +class Overridable: + def __init__(self, applied_on: tp.Optional["Overridable"]) -> None: + self._applied_on = applied_on + + def _get_value(self) -> tp.Any: + if self._applied_on is None: + raise errors.UnsupportedParameterOperationError("_get_value is undefinied") + self._applied_on._get_value() + + def _set_value(self, value: tp.Any) -> None: + if self._applied_on is None: + raise errors.UnsupportedParameterOperationError("_set_value is undefinied") + self._applied_on._set_value(value) + + 
+class ValueProperty(tp.Generic[X]): + """Typed property (descriptor) object so that the value attribute of + Parameter objects fetches _get_value and _set_value methods + """ + + # This uses the descriptor protocol, like a property: + # See https://docs.python.org/3/howto/descriptor.html + # + # Basically parameter.value calls parameter.value.__get__ + # and then parameter._get_value + def __init__(self) -> None: + self.__doc__ = """Value of the Parameter, which should be sent to the function + to optimize. + + Example + ------- + >>> ng.p.Array(shape=(2,)).value + array([0., 0.]) + """ + + def __get__(self, obj: Overridable, objtype: tp.Optional[tp.Type[object]] = None) -> X: + base = obj if obj._applied_on is None else obj._applied_on + return base._get_value() # type: ignore + + def __set__(self, obj: Overridable, value: X) -> None: + base = obj if obj._applied_on is None else obj._applied_on + base._set_value(value) From d3c567c6bc99750e719906e215671f2fa9fa2243 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Thu, 11 Feb 2021 13:03:05 +0100 Subject: [PATCH 02/39] fix --- nevergrad/parametrization/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nevergrad/parametrization/utils.py b/nevergrad/parametrization/utils.py index 3c6d899cf..efdad65d9 100644 --- a/nevergrad/parametrization/utils.py +++ b/nevergrad/parametrization/utils.py @@ -306,7 +306,7 @@ def _get_value(self) -> tp.Any: raise errors.UnsupportedParameterOperationError("_get_value is undefinied") self._applied_on._get_value() - def _set_value(self, value: tp.Any) -> None: + def _set_value(self, value: tp.Any) -> tp.Any: if self._applied_on is None: raise errors.UnsupportedParameterOperationError("_set_value is undefinied") self._applied_on._set_value(value) From 85ecb91ee87155baea5a3db4344e484f06f228ce Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Thu, 11 Feb 2021 13:03:57 +0100 Subject: [PATCH 03/39] useless --- nevergrad/parametrization/utils.py | 5 ++--- 1 file 
changed, 2 insertions(+), 3 deletions(-) diff --git a/nevergrad/parametrization/utils.py b/nevergrad/parametrization/utils.py index efdad65d9..3b8d19f2c 100644 --- a/nevergrad/parametrization/utils.py +++ b/nevergrad/parametrization/utils.py @@ -12,7 +12,6 @@ import numpy as np from nevergrad.common import typing as tp from nevergrad.common import tools as ngtools -from nevergrad.common import errors class BoundChecker: @@ -303,12 +302,12 @@ def __init__(self, applied_on: tp.Optional["Overridable"]) -> None: def _get_value(self) -> tp.Any: if self._applied_on is None: - raise errors.UnsupportedParameterOperationError("_get_value is undefinied") + raise NotImplementedError self._applied_on._get_value() def _set_value(self, value: tp.Any) -> tp.Any: if self._applied_on is None: - raise errors.UnsupportedParameterOperationError("_set_value is undefinied") + raise NotImplementedError self._applied_on._set_value(value) From 5032364d418a1ceb11e2c7381b2a3339ad041559 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Thu, 11 Feb 2021 16:40:45 +0100 Subject: [PATCH 04/39] wip --- nevergrad/common/decorators.py | 4 +- nevergrad/common/errors.py | 8 +++- nevergrad/parametrization/core.py | 13 ++---- nevergrad/parametrization/utils.py | 65 ++++++++++++++++++++++++------ 4 files changed, 63 insertions(+), 27 deletions(-) diff --git a/nevergrad/common/decorators.py b/nevergrad/common/decorators.py index d102e2f36..05f657663 100644 --- a/nevergrad/common/decorators.py +++ b/nevergrad/common/decorators.py @@ -33,8 +33,8 @@ def register_name( self, name: str, obj: X, info: tp.Optional[tp.Dict[tp.Hashable, tp.Any]] = None ) -> None: """Register an object with a provided name""" - if name in self: - raise RuntimeError(f'Encountered a name collision "{name}"') + # if name in self: + # raise RuntimeError(f'Encountered a name collision "{name}"') self[name] = obj if info is not None: assert isinstance(info, dict) diff --git a/nevergrad/common/errors.py b/nevergrad/common/errors.py index 
6630d4c94..ea7b53a92 100644 --- a/nevergrad/common/errors.py +++ b/nevergrad/common/errors.py @@ -20,6 +20,10 @@ class NevergradWarning(Warning): # errors +class NevergradRuntimeError(RuntimeError, NevergradError): + """Runtime error raised by Nevergrad""" + + class TellNotAskedNotSupportedError(NotImplementedError, NevergradError): """To be raised by optimizers which do not support the tell_not_asked interface.""" @@ -35,11 +39,11 @@ class UnsupportedExperiment(RuntimeError, unittest.SkipTest, NevergradError): """ -class NevergradDeprecationError(RuntimeError, NevergradError): +class NevergradDeprecationError(NevergradRuntimeError): """Deprecated function/class""" -class UnsupportedParameterOperationError(RuntimeError, NevergradError): +class UnsupportedParameterOperationError(NevergradRuntimeError): """This type of operation is not supported by the parameter""" diff --git a/nevergrad/parametrization/core.py b/nevergrad/parametrization/core.py index 45212a342..93b7c4448 100644 --- a/nevergrad/parametrization/core.py +++ b/nevergrad/parametrization/core.py @@ -4,7 +4,6 @@ # LICENSE file in the root directory of this source tree. 
import uuid -import copy import warnings import numpy as np import nevergrad.common.typing as tp @@ -17,7 +16,7 @@ # pylint: disable=too-many-public-methods -class Parameter(utils.Overridable): +class Parameter(utils.Layered): """Class providing the core functionality of a parameter, aka value, internal/model parameters, mutation, recombination and additional features such as shared random state, @@ -38,7 +37,7 @@ class Parameter(utils.Overridable): def __init__(self) -> None: # Main features - super().__init__(applied_on=None) + super().__init__() self.uid = uuid.uuid4().hex self._subobjects = utils.Subobjects( self, base=Parameter, attribute="__dict__" @@ -74,12 +73,6 @@ def losses(self) -> np.ndarray: return np.array([self.loss], dtype=float) raise RuntimeError("No loss was provided") - def _get_value(self) -> tp.Any: - raise NotImplementedError - - def _set_value(self, value: tp.Any) -> tp.Any: - raise NotImplementedError - @property def args(self) -> tp.Tuple[tp.Any, ...]: """Value of the positional arguments. 
@@ -355,7 +348,7 @@ def spawn_child(self: P, new_value: tp.Optional[tp.Any] = None) -> P: """ # make sure to initialize the random state before spawning children self.random_state # pylint: disable=pointless-statement - child = copy.copy(self) + child = self.copy() child.uid = uuid.uuid4().hex child._frozen = False child._generation += 1 diff --git a/nevergrad/parametrization/utils.py b/nevergrad/parametrization/utils.py index 3b8d19f2c..a20e88282 100644 --- a/nevergrad/parametrization/utils.py +++ b/nevergrad/parametrization/utils.py @@ -5,11 +5,13 @@ import os import sys +import copy import shutil import tempfile import subprocess from pathlib import Path import numpy as np +from nevergrad.common import errors from nevergrad.common import typing as tp from nevergrad.common import tools as ngtools @@ -296,19 +298,54 @@ def float_penalty(x: tp.Union[bool, float]) -> float: raise TypeError(f"Only bools and floats are supported for check constaint, but got: {x} ({type(x)})") -class Overridable: - def __init__(self, applied_on: tp.Optional["Overridable"]) -> None: - self._applied_on = applied_on +L = tp.TypeVar("L", bound="Layered") + + +class Layered: + """Hidden API for overriding/modifying the behavior of a Parameter, + which is itself a Layered object. + """ + + def __init__(self) -> None: + self._layers = [self] + self._index = 0 + + def _get_layer_index(self) -> int: + if self._layers[self._index] is not self: + raise errors.NevergradRuntimeError( + "Layer indexing has changed for an unknown reason. 
Please open an issue" + ) + return self._index def _get_value(self) -> tp.Any: - if self._applied_on is None: + index = self._get_layer_index() + if not index: # roor must have an implementation raise NotImplementedError - self._applied_on._get_value() + self._layers[index - 1]._get_value() def _set_value(self, value: tp.Any) -> tp.Any: - if self._applied_on is None: + index = self._get_layer_index() + if not index: # roor must have an implementation raise NotImplementedError - self._applied_on._set_value(value) + self._layers[index - 1]._set_value(value) + + def _del_value(self) -> tp.Any: + pass + + def add_layer(self: L, other: "Layered") -> L: + if self is not self._layers[0]: + raise errors.NevergradRuntimeError("Layers can only be added from the root.") + if len(other._layers) > 1: + raise errors.NevergradRuntimeError("Cannot append multiple layers at once") + other._index = len(self._layers) + self._layers.append(other) + return self + + def copy(self: L) -> L: + new = copy.copy(self) + new._layers = [new] + self._index = 0 + return new class ValueProperty(tp.Generic[X]): @@ -331,10 +368,12 @@ def __init__(self) -> None: array([0., 0.]) """ - def __get__(self, obj: Overridable, objtype: tp.Optional[tp.Type[object]] = None) -> X: - base = obj if obj._applied_on is None else obj._applied_on - return base._get_value() # type: ignore + def __get__(self, obj: Layered, objtype: tp.Optional[tp.Type[object]] = None) -> X: + return obj._layers[-1]._get_value() # type: ignore + + def __set__(self, obj: Layered, value: X) -> None: + obj._layers[-1]._set_value(value) - def __set__(self, obj: Overridable, value: X) -> None: - base = obj if obj._applied_on is None else obj._applied_on - base._set_value(value) + def __delete__(self, obj: Layered) -> None: + for layer in obj._layers: + layer._del_value() From 2bfd6e740db94baadf63b98552517425ba72dc8f Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Thu, 11 Feb 2021 16:53:44 +0100 Subject: [PATCH 05/39] fix --- 
nevergrad/common/decorators.py | 4 ++-- nevergrad/parametrization/core.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nevergrad/common/decorators.py b/nevergrad/common/decorators.py index 05f657663..d102e2f36 100644 --- a/nevergrad/common/decorators.py +++ b/nevergrad/common/decorators.py @@ -33,8 +33,8 @@ def register_name( self, name: str, obj: X, info: tp.Optional[tp.Dict[tp.Hashable, tp.Any]] = None ) -> None: """Register an object with a provided name""" - # if name in self: - # raise RuntimeError(f'Encountered a name collision "{name}"') + if name in self: + raise RuntimeError(f'Encountered a name collision "{name}"') self[name] = obj if info is not None: assert isinstance(info, dict) diff --git a/nevergrad/parametrization/core.py b/nevergrad/parametrization/core.py index 93b7c4448..07bffea3b 100644 --- a/nevergrad/parametrization/core.py +++ b/nevergrad/parametrization/core.py @@ -348,7 +348,7 @@ def spawn_child(self: P, new_value: tp.Optional[tp.Any] = None) -> P: """ # make sure to initialize the random state before spawning children self.random_state # pylint: disable=pointless-statement - child = self.copy() + child = super().copy() child.uid = uuid.uuid4().hex child._frozen = False child._generation += 1 From 2ae16101d5eca5987c06f58d9b9ec724ed590958 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 10:19:37 +0100 Subject: [PATCH 06/39] propagatelayers --- nevergrad/parametrization/core.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/nevergrad/parametrization/core.py b/nevergrad/parametrization/core.py index 07bffea3b..084415b39 100644 --- a/nevergrad/parametrization/core.py +++ b/nevergrad/parametrization/core.py @@ -359,6 +359,12 @@ def spawn_child(self: P, new_value: tp.Optional[tp.Any] = None) -> P: child.loss = None child._losses = None child._constraint_checkers = list(self._constraint_checkers) + # layers + if self is not self._layers[0]: + raise errors.RuntimeError("Something has gone 
horribly wrong with the layers") + for layer in self._layers[1:]: + child.add_layer(layer.copy()) + # subparameters attribute = self._subobjects.attribute container = getattr(child, attribute) if attribute != "__dict__": # make a copy of the container if different from __dict__ @@ -390,6 +396,8 @@ def copy(self: P) -> P: # TODO test (see former instrumentation_copy test) This is used to run multiple experiments """ child = self.spawn_child() + child._generation -= 1 + child.parents_uids = list(self.parents_uids) child.random_state = None return child From 22ec11673b818fdd71297961a6f6b0cb67ab5f9f Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 10:21:10 +0100 Subject: [PATCH 07/39] doc --- nevergrad/parametrization/utils.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nevergrad/parametrization/utils.py b/nevergrad/parametrization/utils.py index a20e88282..0711808fd 100644 --- a/nevergrad/parametrization/utils.py +++ b/nevergrad/parametrization/utils.py @@ -333,6 +333,7 @@ def _del_value(self) -> tp.Any: pass def add_layer(self: L, other: "Layered") -> L: + """Adds a layer which will modify the object behavior""" if self is not self._layers[0]: raise errors.NevergradRuntimeError("Layers can only be added from the root.") if len(other._layers) > 1: @@ -342,6 +343,7 @@ def add_layer(self: L, other: "Layered") -> L: return self def copy(self: L) -> L: + """Creates a new unattached layer with the same behavior""" new = copy.copy(self) new._layers = [new] self._index = 0 From cf02f8042b20f9606e1716439ffc9d6e46f34a72 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 11:48:55 +0100 Subject: [PATCH 08/39] Prepare integer --- nevergrad/parametrization/core.py | 1 + nevergrad/parametrization/layers.py | 24 ++++++++++++++++++++++++ nevergrad/parametrization/utils.py | 27 +++++++++++++++++++++------ 3 files changed, 46 insertions(+), 6 deletions(-) create mode 100644 nevergrad/parametrization/layers.py diff --git 
a/nevergrad/parametrization/core.py b/nevergrad/parametrization/core.py index 084415b39..b34116902 100644 --- a/nevergrad/parametrization/core.py +++ b/nevergrad/parametrization/core.py @@ -33,6 +33,7 @@ class Parameter(utils.Layered): # sub-parameters. # Spawning a child creates a shallow copy. + _LAYER_LEVEL = 0.0 value: ValueProperty[tp.Any] = ValueProperty() def __init__(self) -> None: diff --git a/nevergrad/parametrization/layers.py b/nevergrad/parametrization/layers.py new file mode 100644 index 000000000..949a5e159 --- /dev/null +++ b/nevergrad/parametrization/layers.py @@ -0,0 +1,24 @@ +import numpy as np +from nevergrad.common import errors +import nevergrad.common.typing as tp +from . import utils + + +class _ScalarCasting(utils.Layered): + def _get_value(self) -> float: + out = super()._get_value() # pulls from previous layer + if not isinstance(out, np.ndarray) or not out.size == 1: + raise errors.NevergradRuntimeError("Scalar casting can only be applied to size=1 Data parameters") + integer = issubclass(out.dtype, np.int) + out = (int if integer else float)(out[0]) + return out # type: ignore + + def _set_value(self, value: tp.Any) -> None: + if not isinstance(value, (float, int, np.float, np.int)): + raise TypeError(f"Received a {type(value)} in place of a scalar (float, int)") + value = np.array([value], dtype=float) + + +class IntegerCasting(utils.Layered): + def _get_value(self) -> np.ndarry: + return np.round(super()._get_value()).astype(int) diff --git a/nevergrad/parametrization/utils.py b/nevergrad/parametrization/utils.py index 0711808fd..f46bef6cd 100644 --- a/nevergrad/parametrization/utils.py +++ b/nevergrad/parametrization/utils.py @@ -7,6 +7,7 @@ import sys import copy import shutil +import bisect import tempfile import subprocess from pathlib import Path @@ -304,8 +305,16 @@ def float_penalty(x: tp.Union[bool, float]) -> float: class Layered: """Hidden API for overriding/modifying the behavior of a Parameter, which is itself a Layered 
object. + + Layers can be added and will be ordered depending on their level + 0: root + 1-10: bounds + 11-20: casting + 21-30: casting """ + _LAYER_LEVEL = 1.0 + def __init__(self) -> None: self._layers = [self] self._index = 0 @@ -319,13 +328,13 @@ def _get_layer_index(self) -> int: def _get_value(self) -> tp.Any: index = self._get_layer_index() - if not index: # roor must have an implementation + if not index: # root must have an implementation raise NotImplementedError - self._layers[index - 1]._get_value() + return self._layers[index - 1]._get_value() def _set_value(self, value: tp.Any) -> tp.Any: index = self._get_layer_index() - if not index: # roor must have an implementation + if not index: # root must have an implementation raise NotImplementedError self._layers[index - 1]._set_value(value) @@ -334,12 +343,18 @@ def _del_value(self) -> tp.Any: def add_layer(self: L, other: "Layered") -> L: """Adds a layer which will modify the object behavior""" - if self is not self._layers[0]: + if self is not self._layers[0] or self._LAYER_LEVEL: raise errors.NevergradRuntimeError("Layers can only be added from the root.") if len(other._layers) > 1: raise errors.NevergradRuntimeError("Cannot append multiple layers at once") - other._index = len(self._layers) - self._layers.append(other) + if other._LAYER_LEVEL >= self._layers[-1]._LAYER_LEVEL: + other._index = len(self._layers) + self._layers.append(other) + else: + ind = bisect.bisect_right([x._LAYER_LEVEL for x in self._layers], other._LAYER_LEVEL) + self._layers.insert(ind, other) + for k, x in enumerate(self._layers): + x._index = k return self def copy(self: L) -> L: From 47ed796d354970b7bbaaeca726a9519a4747f957 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 11:55:19 +0100 Subject: [PATCH 09/39] name --- nevergrad/parametrization/core.py | 24 +----------------------- nevergrad/parametrization/data.py | 4 +++- nevergrad/parametrization/layers.py | 2 +- nevergrad/parametrization/utils.py | 24 
++++++++++++++++++++++++ 4 files changed, 29 insertions(+), 25 deletions(-) diff --git a/nevergrad/parametrization/core.py b/nevergrad/parametrization/core.py index b34116902..d3efdbbf3 100644 --- a/nevergrad/parametrization/core.py +++ b/nevergrad/parametrization/core.py @@ -53,7 +53,6 @@ def __init__(self) -> None: self._generation = 0 # self._constraint_checkers: tp.List[tp.Union[tp.Callable[[tp.Any], bool], tp.Callable[[tp.Any], float]]] = [] self._constraint_checkers: tp.List[tp.Callable[[tp.Any], tp.Union[bool, float]]] = [] - self._name: tp.Optional[str] = None self._frozen = False self._descriptors: tp.Optional[utils.Descriptors] = None self._meta: tp.Dict[tp.Hashable, tp.Any] = {} # for anything algorithm related @@ -231,27 +230,6 @@ def get_value_hash(self) -> tp.Hashable: f"Value hash is not supported for object {self.name}" ) - def _get_name(self) -> str: - """Internal implementation of parameter name. This should be value independant, and should not account - for internal/model parameters. - """ - return self.__class__.__name__ - - @property - def name(self) -> str: - """Name of the parameter - This is used to keep track of how this Parameter is configured (included through internal/model parameters), - mostly for reproducibility A default version is always provided, but can be overriden directly - through the attribute, or through the set_name method (which allows chaining). 
- """ - if self._name is not None: - return self._name - return self._get_name() - - @name.setter - def name(self, name: str) -> None: - self.set_name(name) # with_name allows chaining - def __repr__(self) -> str: strings = [self.name] if not callable(self.value): # not a mutation @@ -362,7 +340,7 @@ def spawn_child(self: P, new_value: tp.Optional[tp.Any] = None) -> P: child._constraint_checkers = list(self._constraint_checkers) # layers if self is not self._layers[0]: - raise errors.RuntimeError("Something has gone horribly wrong with the layers") + raise errors.NevergradRuntimeError("Something has gone horribly wrong with the layers") for layer in self._layers[1:]: child.add_layer(layer.copy()) # subparameters diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index ccdc1e906..247c2550c 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -8,6 +8,7 @@ import numpy as np import nevergrad.common.typing as tp from . import core +from . import layers from .container import Dict from . import utils from . import transforms as trans @@ -339,7 +340,8 @@ def set_integer_casting(self: D) -> D: difficult. It is especially ill-advised to use this with a range smaller than 10, or a sigma lower than 1. In those cases, you should rather use a TransitionChoice instead. 
""" - self.integer = True + self.add_layer(layers.IntegerCasting()) + # self.integer = True return self # pylint: disable=unused-argument diff --git a/nevergrad/parametrization/layers.py b/nevergrad/parametrization/layers.py index 949a5e159..b00e03462 100644 --- a/nevergrad/parametrization/layers.py +++ b/nevergrad/parametrization/layers.py @@ -20,5 +20,5 @@ def _set_value(self, value: tp.Any) -> None: class IntegerCasting(utils.Layered): - def _get_value(self) -> np.ndarry: + def _get_value(self) -> np.ndarray: return np.round(super()._get_value()).astype(int) diff --git a/nevergrad/parametrization/utils.py b/nevergrad/parametrization/utils.py index f46bef6cd..05ea5d408 100644 --- a/nevergrad/parametrization/utils.py +++ b/nevergrad/parametrization/utils.py @@ -318,6 +318,7 @@ class Layered: def __init__(self) -> None: self._layers = [self] self._index = 0 + self._name: tp.Optional[str] = None def _get_layer_index(self) -> int: if self._layers[self._index] is not self: @@ -364,6 +365,29 @@ def copy(self: L) -> L: self._index = 0 return new + # naming capacity + + def _get_name(self) -> str: + """Internal implementation of parameter name. This should be value independant, and should not account + for internal/model parameters. + """ + return self.__class__.__name__ + + @property + def name(self) -> str: + """Name of the parameter + This is used to keep track of how this Parameter is configured (included through internal/model parameters), + mostly for reproducibility A default version is always provided, but can be overriden directly + through the attribute, or through the set_name method (which allows chaining). 
+ """ + if self._name is not None: + return self._name + return self._get_name() + + @name.setter + def name(self, name: str) -> None: + self.set_name(name) # with_name allows chaining + class ValueProperty(tp.Generic[X]): """Typed property (descriptor) object so that the value attribute of From bd1fb0d9985af8aec8c9bd209ce15f0760b2a774 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 11:59:59 +0100 Subject: [PATCH 10/39] fix --- nevergrad/parametrization/utils.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/nevergrad/parametrization/utils.py b/nevergrad/parametrization/utils.py index 05ea5d408..83757d378 100644 --- a/nevergrad/parametrization/utils.py +++ b/nevergrad/parametrization/utils.py @@ -321,6 +321,7 @@ def __init__(self) -> None: self._name: tp.Optional[str] = None def _get_layer_index(self) -> int: + print(self, self._layers) if self._layers[self._index] is not self: raise errors.NevergradRuntimeError( "Layer indexing has changed for an unknown reason. 
Please open an issue" @@ -356,6 +357,7 @@ def add_layer(self: L, other: "Layered") -> L: self._layers.insert(ind, other) for k, x in enumerate(self._layers): x._index = k + other._layers = self._layers return self def copy(self: L) -> L: @@ -373,6 +375,9 @@ def _get_name(self) -> str: """ return self.__class__.__name__ + def __repr__(self) -> str: + return self.name + @property def name(self) -> str: """Name of the parameter @@ -388,6 +393,17 @@ def name(self) -> str: def name(self, name: str) -> None: self.set_name(name) # with_name allows chaining + def set_name(self: L, name: str) -> L: + """Sets a name and return the current instrumentation (for chaining) + + Parameters + ---------- + name: str + new name to use to represent the Parameter + """ + self._name = name + return self + class ValueProperty(tp.Generic[X]): """Typed property (descriptor) object so that the value attribute of From c88203fdc303182fd45bfe0421d8c19684f39ce0 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 13:50:52 +0100 Subject: [PATCH 11/39] fixes --- nevergrad/parametrization/core.py | 12 +----------- nevergrad/parametrization/layers.py | 4 ++++ nevergrad/parametrization/utils.py | 15 ++++++++++++--- 3 files changed, 17 insertions(+), 14 deletions(-) diff --git a/nevergrad/parametrization/core.py b/nevergrad/parametrization/core.py index d3efdbbf3..257bf7b12 100644 --- a/nevergrad/parametrization/core.py +++ b/nevergrad/parametrization/core.py @@ -53,6 +53,7 @@ def __init__(self) -> None: self._generation = 0 # self._constraint_checkers: tp.List[tp.Union[tp.Callable[[tp.Any], bool], tp.Callable[[tp.Any], float]]] = [] self._constraint_checkers: tp.List[tp.Callable[[tp.Any], tp.Union[bool, float]]] = [] + self._name: tp.Optional[str] = None self._frozen = False self._descriptors: tp.Optional[utils.Descriptors] = None self._meta: tp.Dict[tp.Hashable, tp.Any] = {} # for anything algorithm related @@ -236,17 +237,6 @@ def __repr__(self) -> str: 
strings.append(str(self.value)) return ":".join(strings) - def set_name(self: P, name: str) -> P: - """Sets a name and return the current instrumentation (for chaining) - - Parameters - ---------- - name: str - new name to use to represent the Parameter - """ - self._name = name - return self - # %% Constraint management def satisfies_constraints(self) -> bool: diff --git a/nevergrad/parametrization/layers.py b/nevergrad/parametrization/layers.py index b00e03462..0daf78099 100644 --- a/nevergrad/parametrization/layers.py +++ b/nevergrad/parametrization/layers.py @@ -5,6 +5,8 @@ class _ScalarCasting(utils.Layered): + """Cast Array as a scalar""" + def _get_value(self) -> float: out = super()._get_value() # pulls from previous layer if not isinstance(out, np.ndarray) or not out.size == 1: @@ -20,5 +22,7 @@ def _set_value(self, value: tp.Any) -> None: class IntegerCasting(utils.Layered): + """Cast Data as integer (or integer array)""" + def _get_value(self) -> np.ndarray: return np.round(super()._get_value()).astype(int) diff --git a/nevergrad/parametrization/utils.py b/nevergrad/parametrization/utils.py index 83757d378..9d0a401fd 100644 --- a/nevergrad/parametrization/utils.py +++ b/nevergrad/parametrization/utils.py @@ -321,10 +321,15 @@ def __init__(self) -> None: self._name: tp.Optional[str] = None def _get_layer_index(self) -> int: - print(self, self._layers) + print("self", self.name) + print("index", self._index) + print("layers", [l.name for l in self._layers]) if self._layers[self._index] is not self: + layers = [f"{l.name}({l._index})" for l in self._layers] raise errors.NevergradRuntimeError( - "Layer indexing has changed for an unknown reason. Please open an issue" + "Layer indexing has changed for an unknown reason. 
Please open an issue:\n" + f"Caller at index {self._index}: {self.name}" + f"Layers: {layers}.\n" ) return self._index @@ -332,6 +337,7 @@ def _get_value(self) -> tp.Any: index = self._get_layer_index() if not index: # root must have an implementation raise NotImplementedError + print(f"getting {index - 1} from {index}") return self._layers[index - 1]._get_value() def _set_value(self, value: tp.Any) -> tp.Any: @@ -349,10 +355,13 @@ def add_layer(self: L, other: "Layered") -> L: raise errors.NevergradRuntimeError("Layers can only be added from the root.") if len(other._layers) > 1: raise errors.NevergradRuntimeError("Cannot append multiple layers at once") + print(f"Inserting {other.name}") if other._LAYER_LEVEL >= self._layers[-1]._LAYER_LEVEL: + print("ordered") other._index = len(self._layers) self._layers.append(other) else: + print("unordered") ind = bisect.bisect_right([x._LAYER_LEVEL for x in self._layers], other._LAYER_LEVEL) self._layers.insert(ind, other) for k, x in enumerate(self._layers): @@ -364,7 +373,7 @@ def copy(self: L) -> L: """Creates a new unattached layer with the same behavior""" new = copy.copy(self) new._layers = [new] - self._index = 0 + new._index = 0 return new # naming capacity From bfbc9e073905dbaf98d1daf308de1e73b3988b63 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 14:05:30 +0100 Subject: [PATCH 12/39] step --- nevergrad/parametrization/layers.py | 2 +- nevergrad/parametrization/utils.py | 3 --- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/nevergrad/parametrization/layers.py b/nevergrad/parametrization/layers.py index 0daf78099..791ef0730 100644 --- a/nevergrad/parametrization/layers.py +++ b/nevergrad/parametrization/layers.py @@ -11,7 +11,7 @@ def _get_value(self) -> float: out = super()._get_value() # pulls from previous layer if not isinstance(out, np.ndarray) or not out.size == 1: raise errors.NevergradRuntimeError("Scalar casting can only be applied to size=1 Data parameters") - integer = 
issubclass(out.dtype, np.int) + integer = np.issubdtype(out.dtype, np.int) out = (int if integer else float)(out[0]) return out # type: ignore diff --git a/nevergrad/parametrization/utils.py b/nevergrad/parametrization/utils.py index 9d0a401fd..a79980045 100644 --- a/nevergrad/parametrization/utils.py +++ b/nevergrad/parametrization/utils.py @@ -355,13 +355,10 @@ def add_layer(self: L, other: "Layered") -> L: raise errors.NevergradRuntimeError("Layers can only be added from the root.") if len(other._layers) > 1: raise errors.NevergradRuntimeError("Cannot append multiple layers at once") - print(f"Inserting {other.name}") if other._LAYER_LEVEL >= self._layers[-1]._LAYER_LEVEL: - print("ordered") other._index = len(self._layers) self._layers.append(other) else: - print("unordered") ind = bisect.bisect_right([x._LAYER_LEVEL for x in self._layers], other._LAYER_LEVEL) self._layers.insert(ind, other) for k, x in enumerate(self._layers): From a0cdf507766618e514cd49d115283664491241de Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 14:10:08 +0100 Subject: [PATCH 13/39] working_revert --- nevergrad/parametrization/data.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index 247c2550c..337ea17cf 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -340,8 +340,8 @@ def set_integer_casting(self: D) -> D: difficult. It is especially ill-advised to use this with a range smaller than 10, or a sigma lower than 1. In those cases, you should rather use a TransitionChoice instead. 
""" - self.add_layer(layers.IntegerCasting()) - # self.integer = True + # self.add_layer(layers.IntegerCasting()) + self.integer = True return self # pylint: disable=unused-argument From c775aed37543300e02d2659bb3e80ec859414a7b Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 14:13:12 +0100 Subject: [PATCH 14/39] reformat --- nevergrad/parametrization/data.py | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index 337ea17cf..e427ea406 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -401,16 +401,6 @@ def spawn_child(self: D, new_value: tp.Optional[tp.Any] = None) -> D: child.value = new_value return child - -class Array(Data): - - value: core.ValueProperty[np.ndarray] = core.ValueProperty() - - def _get_value(self) -> np.ndarray: - if self.integer: - return np.round(self._value) # type: ignore - return self._value - def _set_value(self, value: tp.ArrayLike) -> None: self._check_frozen() self._ref_data = None @@ -429,6 +419,16 @@ def _set_value(self, value: tp.ArrayLike) -> None: self._value = value +class Array(Data): + + value: core.ValueProperty[np.ndarray] = core.ValueProperty() + + def _get_value(self) -> np.ndarray: + if self.integer: + return np.round(self._value) # type: ignore + return self._value + + class Scalar(Data): """Parameter representing a scalar. 
@@ -479,10 +479,9 @@ def _get_value(self) -> float: return float(self._value[0]) if not self.integer else int(np.round(self._value[0])) def _set_value(self, value: float) -> None: - self._check_frozen() if not isinstance(value, (float, int, np.float, np.int)): raise TypeError(f"Received a {type(value)} in place of a scalar (float, int)") - self._value = np.array([value], dtype=float) + super()._set_value(np.array([value], dtype=float)) # pylint: disable=unused-argument From 9c37dbe70e0411966030c865caca84ae8ed26c13 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 14:25:38 +0100 Subject: [PATCH 15/39] somefixes --- nevergrad/parametrization/data.py | 19 +++++++++++++------ nevergrad/parametrization/layers.py | 8 ++++++-- 2 files changed, 19 insertions(+), 8 deletions(-) diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index e427ea406..c2523ad03 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -418,6 +418,9 @@ def _set_value(self, value: tp.ArrayLike) -> None: raise ValueError("Logirithmic values cannot be negative") self._value = value + def _get_value(self) -> float: + return self._value + class Array(Data): @@ -464,6 +467,7 @@ def __init__( ) -> None: bounded = all(a is not None for a in (lower, upper)) no_init = init is None + print("bounded", bounded) if bounded: if init is None: init = (lower + upper) / 2.0 # type: ignore @@ -471,17 +475,20 @@ def __init__( init = 0.0 super().__init__(init=np.array([init]), mutable_sigma=mutable_sigma) if bounded: + print(f"Setting sigma {(upper - lower) / 6}") self.set_mutation(sigma=(upper - lower) / 6) # type: ignore + print("sigma", self.sigma) if any(a is not None for a in (lower, upper)): self.set_bounds(lower=lower, upper=upper, full_range_sampling=bounded and no_init) + self.add_layer(layers._ScalarCasting()) - def _get_value(self) -> float: - return float(self._value[0]) if not self.integer else int(np.round(self._value[0])) + # 
def _get_value(self) -> float: + # return float(self._value[0]) if not self.integer else int(np.round(self._value[0])) - def _set_value(self, value: float) -> None: - if not isinstance(value, (float, int, np.float, np.int)): - raise TypeError(f"Received a {type(value)} in place of a scalar (float, int)") - super()._set_value(np.array([value], dtype=float)) + # def _set_value(self, value: float) -> None: + # if not isinstance(value, (float, int, np.float, np.int)): + # raise TypeError(f"Received a {type(value)} in place of a scalar (float, int)") + # super()._set_value(np.array([value], dtype=float)) # pylint: disable=unused-argument diff --git a/nevergrad/parametrization/layers.py b/nevergrad/parametrization/layers.py index 791ef0730..25ae88d6f 100644 --- a/nevergrad/parametrization/layers.py +++ b/nevergrad/parametrization/layers.py @@ -7,22 +7,26 @@ class _ScalarCasting(utils.Layered): """Cast Array as a scalar""" + _LAYER_LEVEL = 20 # last layer + def _get_value(self) -> float: out = super()._get_value() # pulls from previous layer if not isinstance(out, np.ndarray) or not out.size == 1: raise errors.NevergradRuntimeError("Scalar casting can only be applied to size=1 Data parameters") - integer = np.issubdtype(out.dtype, np.int) + integer = np.issubdtype(out.dtype, np.int64) out = (int if integer else float)(out[0]) return out # type: ignore def _set_value(self, value: tp.Any) -> None: if not isinstance(value, (float, int, np.float, np.int)): raise TypeError(f"Received a {type(value)} in place of a scalar (float, int)") - value = np.array([value], dtype=float) + super()._set_value(np.array([value], dtype=float)) class IntegerCasting(utils.Layered): """Cast Data as integer (or integer array)""" + _LAYER_LEVEL = 15 + def _get_value(self) -> np.ndarray: return np.round(super()._get_value()).astype(int) From 20586baa1818a7241b0bb6b27641cfded18a5c64 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 14:28:16 +0100 Subject: [PATCH 16/39] working --- 
nevergrad/parametrization/data.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index c2523ad03..c81dd6520 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -340,7 +340,7 @@ def set_integer_casting(self: D) -> D: difficult. It is especially ill-advised to use this with a range smaller than 10, or a sigma lower than 1. In those cases, you should rather use a TransitionChoice instead. """ - # self.add_layer(layers.IntegerCasting()) + self.add_layer(layers.IntegerCasting()) self.integer = True return self @@ -418,7 +418,7 @@ def _set_value(self, value: tp.ArrayLike) -> None: raise ValueError("Logirithmic values cannot be negative") self._value = value - def _get_value(self) -> float: + def _get_value(self) -> np.ndarray: return self._value @@ -426,10 +426,10 @@ class Array(Data): value: core.ValueProperty[np.ndarray] = core.ValueProperty() - def _get_value(self) -> np.ndarray: - if self.integer: - return np.round(self._value) # type: ignore - return self._value + # def _get_value(self) -> np.ndarray: + # if self.integer: + # return np.round(self._value) # type: ignore + # return self._value class Scalar(Data): @@ -475,9 +475,7 @@ def __init__( init = 0.0 super().__init__(init=np.array([init]), mutable_sigma=mutable_sigma) if bounded: - print(f"Setting sigma {(upper - lower) / 6}") self.set_mutation(sigma=(upper - lower) / 6) # type: ignore - print("sigma", self.sigma) if any(a is not None for a in (lower, upper)): self.set_bounds(lower=lower, upper=upper, full_range_sampling=bounded and no_init) self.add_layer(layers._ScalarCasting()) From 1a234406e6be669c8be99c3dd6c9d37e624a5db6 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 14:29:48 +0100 Subject: [PATCH 17/39] cleaning --- nevergrad/parametrization/data.py | 29 ----------------------------- 1 file changed, 29 deletions(-) diff --git 
a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index c81dd6520..a53e86ea4 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -164,8 +164,6 @@ def set_bounds( upper: tp.BoundValue = None, method: str = "bouncing", full_range_sampling: tp.Optional[bool] = None, - a_min: tp.BoundValue = None, - a_max: tp.BoundValue = None, ) -> D: """Bounds all real values into [lower, upper] using a provided method @@ -201,7 +199,6 @@ def set_bounds( - "tanh" reaches the boundaries really quickly, while "arctan" is much softer - only "clipping" accepts partial bounds (None values) """ # TODO improve description of methods - lower, upper = _a_min_max_deprecation(**locals()) bounds = tuple( a if isinstance(a, np.ndarray) or a is None else np.array([a], dtype=float) for a in (lower, upper) @@ -480,29 +477,6 @@ def __init__( self.set_bounds(lower=lower, upper=upper, full_range_sampling=bounded and no_init) self.add_layer(layers._ScalarCasting()) - # def _get_value(self) -> float: - # return float(self._value[0]) if not self.integer else int(np.round(self._value[0])) - - # def _set_value(self, value: float) -> None: - # if not isinstance(value, (float, int, np.float, np.int)): - # raise TypeError(f"Received a {type(value)} in place of a scalar (float, int)") - # super()._set_value(np.array([value], dtype=float)) - - -# pylint: disable=unused-argument -def _a_min_max_deprecation( - a_min: tp.Any, a_max: tp.Any, lower: tp.Any, upper: tp.Any, **kwargs: tp.Any -) -> tp.Tuple[tp.Any, tp.Any]: - if a_min is not None: - warnings.warn('"a_min" is deprecated in favor of "lower" for clarity', DeprecationWarning) - assert lower is None, "Use only lower, and not a_min" - lower = a_min - if a_max is not None: - warnings.warn('"a_max" is deprecated in favor of "upper" for clarity', DeprecationWarning) - assert upper is None, "Use only upper, and not a_max" - upper = a_max - return lower, upper - class Log(Scalar): """Parameter representing 
a positive variable, mutated by Gaussian mutation in log-scale. @@ -535,10 +509,7 @@ def __init__( lower: tp.Optional[float] = None, upper: tp.Optional[float] = None, mutable_sigma: bool = False, - a_min: tp.Optional[float] = None, - a_max: tp.Optional[float] = None, ) -> None: - lower, upper = _a_min_max_deprecation(**locals()) no_init = init is None bounded = all(a is not None for a in (lower, upper)) if bounded: From 6c552a26df4f8dbb4ce0fcaedf512efa75ffde77 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 14:34:30 +0100 Subject: [PATCH 18/39] prints --- nevergrad/parametrization/data.py | 6 ------ nevergrad/parametrization/utils.py | 1 - 2 files changed, 7 deletions(-) diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index a53e86ea4..17ae6eb7d 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -423,11 +423,6 @@ class Array(Data): value: core.ValueProperty[np.ndarray] = core.ValueProperty() - # def _get_value(self) -> np.ndarray: - # if self.integer: - # return np.round(self._value) # type: ignore - # return self._value - class Scalar(Data): """Parameter representing a scalar. 
@@ -464,7 +459,6 @@ def __init__( ) -> None: bounded = all(a is not None for a in (lower, upper)) no_init = init is None - print("bounded", bounded) if bounded: if init is None: init = (lower + upper) / 2.0 # type: ignore diff --git a/nevergrad/parametrization/utils.py b/nevergrad/parametrization/utils.py index a79980045..f2bbaeb1c 100644 --- a/nevergrad/parametrization/utils.py +++ b/nevergrad/parametrization/utils.py @@ -337,7 +337,6 @@ def _get_value(self) -> tp.Any: index = self._get_layer_index() if not index: # root must have an implementation raise NotImplementedError - print(f"getting {index - 1} from {index}") return self._layers[index - 1]._get_value() def _set_value(self, value: tp.Any) -> tp.Any: From db2902cb6b4fb000a310922b8097c1bace666eef Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 14:42:21 +0100 Subject: [PATCH 19/39] move --- nevergrad/parametrization/_layering.py | 179 +++++++++++++++++++++++++ nevergrad/parametrization/core.py | 5 +- nevergrad/parametrization/data.py | 6 +- nevergrad/parametrization/layers.py | 32 ----- nevergrad/parametrization/utils.py | 145 -------------------- 5 files changed, 185 insertions(+), 182 deletions(-) create mode 100644 nevergrad/parametrization/_layering.py delete mode 100644 nevergrad/parametrization/layers.py diff --git a/nevergrad/parametrization/_layering.py b/nevergrad/parametrization/_layering.py new file mode 100644 index 000000000..037ff5cf9 --- /dev/null +++ b/nevergrad/parametrization/_layering.py @@ -0,0 +1,179 @@ +import copy +import bisect +import numpy as np +from nevergrad.common import errors +import nevergrad.common.typing as tp + + +L = tp.TypeVar("L", bound="Layered") +X = tp.TypeVar("X") + + +class Layered: + """Hidden API for overriding/modifying the behavior of a Parameter, + which is itself a Layered object. 
+ + Layers can be added and will be ordered depending on their level + 0: root + 1-10: bounds + 11-20: casting + 21-30: casting + """ + + _LAYER_LEVEL = 1.0 + + def __init__(self) -> None: + self._layers = [self] + self._index = 0 + self._name: tp.Optional[str] = None + + def _get_layer_index(self) -> int: + print("self", self.name) + print("index", self._index) + print("layers", [l.name for l in self._layers]) + if self._layers[self._index] is not self: + layers = [f"{l.name}({l._index})" for l in self._layers] + raise errors.NevergradRuntimeError( + "Layer indexing has changed for an unknown reason. Please open an issue:\n" + f"Caller at index {self._index}: {self.name}" + f"Layers: {layers}.\n" + ) + return self._index + + def _get_value(self) -> tp.Any: + index = self._get_layer_index() + if not index: # root must have an implementation + raise NotImplementedError + return self._layers[index - 1]._get_value() + + def _set_value(self, value: tp.Any) -> tp.Any: + index = self._get_layer_index() + if not index: # root must have an implementation + raise NotImplementedError + self._layers[index - 1]._set_value(value) + + def _del_value(self) -> tp.Any: + pass + + def add_layer(self: L, other: "Layered") -> L: + """Adds a layer which will modify the object behavior""" + if self is not self._layers[0] or self._LAYER_LEVEL: + raise errors.NevergradRuntimeError("Layers can only be added from the root.") + if len(other._layers) > 1: + raise errors.NevergradRuntimeError("Cannot append multiple layers at once") + if other._LAYER_LEVEL >= self._layers[-1]._LAYER_LEVEL: + other._index = len(self._layers) + self._layers.append(other) + else: + ind = bisect.bisect_right([x._LAYER_LEVEL for x in self._layers], other._LAYER_LEVEL) + self._layers.insert(ind, other) + for k, x in enumerate(self._layers): + x._index = k + other._layers = self._layers + return self + + def copy(self: L) -> L: + """Creates a new unattached layer with the same behavior""" + new = copy.copy(self) + 
new._layers = [new] + new._index = 0 + return new + + # naming capacity + + def _get_name(self) -> str: + """Internal implementation of parameter name. This should be value independant, and should not account + for internal/model parameters. + """ + return self.__class__.__name__ + + def __repr__(self) -> str: + return self.name + + @property + def name(self) -> str: + """Name of the parameter + This is used to keep track of how this Parameter is configured (included through internal/model parameters), + mostly for reproducibility A default version is always provided, but can be overriden directly + through the attribute, or through the set_name method (which allows chaining). + """ + if self._name is not None: + return self._name + return self._get_name() + + @name.setter + def name(self, name: str) -> None: + self.set_name(name) # with_name allows chaining + + def set_name(self: L, name: str) -> L: + """Sets a name and return the current instrumentation (for chaining) + + Parameters + ---------- + name: str + new name to use to represent the Parameter + """ + self._name = name + return self + + +class ValueProperty(tp.Generic[X]): + """Typed property (descriptor) object so that the value attribute of + Parameter objects fetches _get_value and _set_value methods + """ + + # This uses the descriptor protocol, like a property: + # See https://docs.python.org/3/howto/descriptor.html + # + # Basically parameter.value calls parameter.value.__get__ + # and then parameter._get_value + def __init__(self) -> None: + self.__doc__ = """Value of the Parameter, which should be sent to the function + to optimize. 
+ + Example + ------- + >>> ng.p.Array(shape=(2,)).value + array([0., 0.]) + """ + + def __get__(self, obj: Layered, objtype: tp.Optional[tp.Type[object]] = None) -> X: + return obj._layers[-1]._get_value() # type: ignore + + def __set__(self, obj: Layered, value: X) -> None: + obj._layers[-1]._set_value(value) + + def __delete__(self, obj: Layered) -> None: + for layer in obj._layers: + layer._del_value() + + +# Basic data layers + + +class _ScalarCasting(Layered): + """Cast Array as a scalar""" + + _LAYER_LEVEL = 20 # last layer + + def _get_value(self) -> float: + out = super()._get_value() # pulls from previous layer + if not isinstance(out, np.ndarray) or not out.size == 1: + raise errors.NevergradRuntimeError("Scalar casting can only be applied to size=1 Data parameters") + integer = np.issubdtype(out.dtype, np.int64) + out = (int if integer else float)(out[0]) + return out # type: ignore + + def _set_value(self, value: tp.Any) -> None: + if not isinstance(value, (float, int, np.float, np.int)): + raise TypeError(f"Received a {type(value)} in place of a scalar (float, int)") + super()._set_value(np.array([value], dtype=float)) + + +class IntegerCasting(Layered): + """Cast Data as integer (or integer array)""" + + _LAYER_LEVEL = 15 + + def _get_value(self) -> np.ndarray: + return np.round(super()._get_value()).astype(int) diff --git a/nevergrad/parametrization/core.py b/nevergrad/parametrization/core.py index 257bf7b12..58b75d290 100644 --- a/nevergrad/parametrization/core.py +++ b/nevergrad/parametrization/core.py @@ -9,14 +9,15 @@ import nevergrad.common.typing as tp from nevergrad.common import errors from . 
import utils -from .utils import ValueProperty as ValueProperty +from ._layering import ValueProperty as ValueProperty +from ._layering import Layered as Layered P = tp.TypeVar("P", bound="Parameter") # pylint: disable=too-many-public-methods -class Parameter(utils.Layered): +class Parameter(Layered): """Class providing the core functionality of a parameter, aka value, internal/model parameters, mutation, recombination and additional features such as shared random state, diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index 17ae6eb7d..faac85994 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -7,8 +7,8 @@ import warnings import numpy as np import nevergrad.common.typing as tp +from . import _layering from . import core -from . import layers from .container import Dict from . import utils from . import transforms as trans @@ -337,7 +337,7 @@ def set_integer_casting(self: D) -> D: difficult. It is especially ill-advised to use this with a range smaller than 10, or a sigma lower than 1. In those cases, you should rather use a TransitionChoice instead. """ - self.add_layer(layers.IntegerCasting()) + self.add_layer(_layering.IntegerCasting()) self.integer = True return self @@ -469,7 +469,7 @@ def __init__( self.set_mutation(sigma=(upper - lower) / 6) # type: ignore if any(a is not None for a in (lower, upper)): self.set_bounds(lower=lower, upper=upper, full_range_sampling=bounded and no_init) - self.add_layer(layers._ScalarCasting()) + self.add_layer(_layering._ScalarCasting()) class Log(Scalar): diff --git a/nevergrad/parametrization/layers.py b/nevergrad/parametrization/layers.py deleted file mode 100644 index 25ae88d6f..000000000 --- a/nevergrad/parametrization/layers.py +++ /dev/null @@ -1,32 +0,0 @@ -import numpy as np -from nevergrad.common import errors -import nevergrad.common.typing as tp -from . 
import utils - - -class _ScalarCasting(utils.Layered): - """Cast Array as a scalar""" - - _LAYER_LEVEL = 20 # last layer - - def _get_value(self) -> float: - out = super()._get_value() # pulls from previous layer - if not isinstance(out, np.ndarray) or not out.size == 1: - raise errors.NevergradRuntimeError("Scalar casting can only be applied to size=1 Data parameters") - integer = np.issubdtype(out.dtype, np.int64) - out = (int if integer else float)(out[0]) - return out # type: ignore - - def _set_value(self, value: tp.Any) -> None: - if not isinstance(value, (float, int, np.float, np.int)): - raise TypeError(f"Received a {type(value)} in place of a scalar (float, int)") - super()._set_value(np.array([value], dtype=float)) - - -class IntegerCasting(utils.Layered): - """Cast Data as integer (or integer array)""" - - _LAYER_LEVEL = 15 - - def _get_value(self) -> np.ndarray: - return np.round(super()._get_value()).astype(int) diff --git a/nevergrad/parametrization/utils.py b/nevergrad/parametrization/utils.py index f2bbaeb1c..5568f4a9f 100644 --- a/nevergrad/parametrization/utils.py +++ b/nevergrad/parametrization/utils.py @@ -5,14 +5,11 @@ import os import sys -import copy import shutil -import bisect import tempfile import subprocess from pathlib import Path import numpy as np -from nevergrad.common import errors from nevergrad.common import typing as tp from nevergrad.common import tools as ngtools @@ -297,145 +294,3 @@ def float_penalty(x: tp.Union[bool, float]) -> float: elif isinstance(x, (float, np.float)): return -min(0, x) # Negative ==> >0 raise TypeError(f"Only bools and floats are supported for check constaint, but got: {x} ({type(x)})") - - -L = tp.TypeVar("L", bound="Layered") - - -class Layered: - """Hidden API for overriding/modifying the behavior of a Parameter, - which is itself a Layered object. 
- - Layers can be added and will be ordered depending on their level - 0: root - 1-10: bounds - 11-20: casting - 21-30: casting - """ - - _LAYER_LEVEL = 1.0 - - def __init__(self) -> None: - self._layers = [self] - self._index = 0 - self._name: tp.Optional[str] = None - - def _get_layer_index(self) -> int: - print("self", self.name) - print("index", self._index) - print("layers", [l.name for l in self._layers]) - if self._layers[self._index] is not self: - layers = [f"{l.name}({l._index})" for l in self._layers] - raise errors.NevergradRuntimeError( - "Layer indexing has changed for an unknown reason. Please open an issue:\n" - f"Caller at index {self._index}: {self.name}" - f"Layers: {layers}.\n" - ) - return self._index - - def _get_value(self) -> tp.Any: - index = self._get_layer_index() - if not index: # root must have an implementation - raise NotImplementedError - return self._layers[index - 1]._get_value() - - def _set_value(self, value: tp.Any) -> tp.Any: - index = self._get_layer_index() - if not index: # root must have an implementation - raise NotImplementedError - self._layers[index - 1]._set_value(value) - - def _del_value(self) -> tp.Any: - pass - - def add_layer(self: L, other: "Layered") -> L: - """Adds a layer which will modify the object behavior""" - if self is not self._layers[0] or self._LAYER_LEVEL: - raise errors.NevergradRuntimeError("Layers can only be added from the root.") - if len(other._layers) > 1: - raise errors.NevergradRuntimeError("Cannot append multiple layers at once") - if other._LAYER_LEVEL >= self._layers[-1]._LAYER_LEVEL: - other._index = len(self._layers) - self._layers.append(other) - else: - ind = bisect.bisect_right([x._LAYER_LEVEL for x in self._layers], other._LAYER_LEVEL) - self._layers.insert(ind, other) - for k, x in enumerate(self._layers): - x._index = k - other._layers = self._layers - return self - - def copy(self: L) -> L: - """Creates a new unattached layer with the same behavior""" - new = copy.copy(self) - 
new._layers = [new] - new._index = 0 - return new - - # naming capacity - - def _get_name(self) -> str: - """Internal implementation of parameter name. This should be value independant, and should not account - for internal/model parameters. - """ - return self.__class__.__name__ - - def __repr__(self) -> str: - return self.name - - @property - def name(self) -> str: - """Name of the parameter - This is used to keep track of how this Parameter is configured (included through internal/model parameters), - mostly for reproducibility A default version is always provided, but can be overriden directly - through the attribute, or through the set_name method (which allows chaining). - """ - if self._name is not None: - return self._name - return self._get_name() - - @name.setter - def name(self, name: str) -> None: - self.set_name(name) # with_name allows chaining - - def set_name(self: L, name: str) -> L: - """Sets a name and return the current instrumentation (for chaining) - - Parameters - ---------- - name: str - new name to use to represent the Parameter - """ - self._name = name - return self - - -class ValueProperty(tp.Generic[X]): - """Typed property (descriptor) object so that the value attribute of - Parameter objects fetches _get_value and _set_value methods - """ - - # This uses the descriptor protocol, like a property: - # See https://docs.python.org/3/howto/descriptor.html - # - # Basically parameter.value calls parameter.value.__get__ - # and then parameter._get_value - def __init__(self) -> None: - self.__doc__ = """Value of the Parameter, which should be sent to the function - to optimize. 
- - Example - ------- - >>> ng.p.Array(shape=(2,)).value - array([0., 0.]) - """ - - def __get__(self, obj: Layered, objtype: tp.Optional[tp.Type[object]] = None) -> X: - return obj._layers[-1]._get_value() # type: ignore - - def __set__(self, obj: Layered, value: X) -> None: - obj._layers[-1]._set_value(value) - - def __delete__(self, obj: Layered) -> None: - for layer in obj._layers: - layer._del_value() From dae5f9141ac2c8bf75eb36839beb9669eeac9a62 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 14:44:24 +0100 Subject: [PATCH 20/39] header --- nevergrad/parametrization/_layering.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/nevergrad/parametrization/_layering.py b/nevergrad/parametrization/_layering.py index 037ff5cf9..58b9e5644 100644 --- a/nevergrad/parametrization/_layering.py +++ b/nevergrad/parametrization/_layering.py @@ -1,3 +1,8 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ import copy import bisect import numpy as np From f7f0a380207e83ba6c16c63ab846f502b1021304 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 14:57:52 +0100 Subject: [PATCH 21/39] remove_attr --- nevergrad/parametrization/data.py | 16 ++++++++++------ nevergrad/parametrization/test_parameter.py | 5 ++--- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index faac85994..30ff032fa 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -41,6 +41,9 @@ class Mutation(core.Parameter): Recombinations should take several """ + # NOTE: this API should disappear in favor of the layer API + # (a layer can modify the mutation scheme) + # pylint: disable=unused-argument value: core.ValueProperty[tp.Callable[[tp.Sequence[D]], None]] = core.ValueProperty() @@ -113,7 +116,6 @@ def __init__( self._value = np.zeros(shape) else: raise ValueError(err_msg) - self.integer = False self.exponent: tp.Optional[float] = None self.bounds: tp.Tuple[tp.Optional[np.ndarray], tp.Optional[np.ndarray]] = (None, None) self.bound_transform: tp.Optional[trans.BoundTransform] = None @@ -121,13 +123,13 @@ def __init__( self._ref_data: tp.Optional[np.ndarray] = None def _compute_descriptors(self) -> utils.Descriptors: - return utils.Descriptors(continuous=not self.integer) + return utils.Descriptors(continuous=not self._integer) def _get_name(self) -> str: cls = self.__class__.__name__ descriptors: tp.List[str] = ( ["int"] - if self.integer + if self._integer else ([str(self._value.shape).replace(" ", "")] if self._value.shape != (1,) else []) ) descriptors += [f"exp={self.exponent}"] if self.exponent is not None else [] @@ -337,9 +339,11 @@ def set_integer_casting(self: D) -> D: difficult. It is especially ill-advised to use this with a range smaller than 10, or a sigma lower than 1. In those cases, you should rather use a TransitionChoice instead. 
""" - self.add_layer(_layering.IntegerCasting()) - self.integer = True - return self + return self.add_layer(_layering.IntegerCasting()) + + @property + def _integer(self) -> bool: + return any(isinstance(x, _layering.IntegerCasting) for x in self._layers) # pylint: disable=unused-argument def _internal_set_standardized_data( diff --git a/nevergrad/parametrization/test_parameter.py b/nevergrad/parametrization/test_parameter.py index a35b56ca9..38e0c8397 100644 --- a/nevergrad/parametrization/test_parameter.py +++ b/nevergrad/parametrization/test_parameter.py @@ -51,10 +51,10 @@ def _true(*args: tp.Any, **kwargs: tp.Any) -> bool: # pylint: disable=unused-ar return True -@pytest.mark.parametrize( +@pytest.mark.parametrize( # type: ignore "param", [ - par.Array(shape=(2, 2)), # type: ignore + par.Array(shape=(2, 2)), par.Array(init=np.ones(3)).set_mutation(sigma=3, exponent=5), par.Scalar(), par.Scalar(1.0).set_mutation(exponent=2.0), @@ -130,7 +130,6 @@ def check_parameter_features(param: par.Parameter) -> None: # array info transfer: if isinstance(param, par.Data): for name in ( - "integer", "exponent", "bounds", "bound_transform", From 9e43ad5e5ff7f48733ac342b91ebad61f4c34d1b Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 15:02:37 +0100 Subject: [PATCH 22/39] prints --- nevergrad/parametrization/_layering.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/nevergrad/parametrization/_layering.py b/nevergrad/parametrization/_layering.py index 58b9e5644..44c53ced8 100644 --- a/nevergrad/parametrization/_layering.py +++ b/nevergrad/parametrization/_layering.py @@ -33,9 +33,6 @@ def __init__(self) -> None: self._name: tp.Optional[str] = None def _get_layer_index(self) -> int: - print("self", self.name) - print("index", self._index) - print("layers", [l.name for l in self._layers]) if self._layers[self._index] is not self: layers = [f"{l.name}({l._index})" for l in self._layers] raise errors.NevergradRuntimeError( From 
65d010ce4c31583d73f2c382a225284be7095b1c Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 15:04:19 +0100 Subject: [PATCH 23/39] print --- nevergrad/parametrization/data.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index 30ff032fa..c5a5b706c 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -384,7 +384,6 @@ def recombine(self: D, *others: D) -> None: if recomb is None: return all_params = [self] + list(others) - print(all_params) if isinstance(recomb, str) and recomb == "average": all_arrays = [p.get_standardized_data(reference=self) for p in all_params] self.set_standardized_data(np.mean(all_arrays, axis=0), deterministic=False) From ba3eed76cdd0832f86541d67981c8dae605d9357 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 15:36:39 +0100 Subject: [PATCH 24/39] fix --- nevergrad/parametrization/data.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index c5a5b706c..c61c173ea 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -123,13 +123,13 @@ def __init__( self._ref_data: tp.Optional[np.ndarray] = None def _compute_descriptors(self) -> utils.Descriptors: - return utils.Descriptors(continuous=not self._integer) + return utils.Descriptors(continuous=not self.integer) def _get_name(self) -> str: cls = self.__class__.__name__ descriptors: tp.List[str] = ( ["int"] - if self._integer + if self.integer else ([str(self._value.shape).replace(" ", "")] if self._value.shape != (1,) else []) ) descriptors += [f"exp={self.exponent}"] if self.exponent is not None else [] @@ -342,7 +342,7 @@ def set_integer_casting(self: D) -> D: return self.add_layer(_layering.IntegerCasting()) @property - def _integer(self) -> bool: + def integer(self) -> bool: return any(isinstance(x, _layering.IntegerCasting) 
for x in self._layers) # pylint: disable=unused-argument From d2e337014534e8ae5174a161b540e8f79d416649 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 15:41:42 +0100 Subject: [PATCH 25/39] mypy --- nevergrad/parametrization/_layering.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nevergrad/parametrization/_layering.py b/nevergrad/parametrization/_layering.py index 44c53ced8..cf7d3a706 100644 --- a/nevergrad/parametrization/_layering.py +++ b/nevergrad/parametrization/_layering.py @@ -178,4 +178,4 @@ class IntegerCasting(Layered): _LAYER_LEVEL = 15 def _get_value(self) -> np.ndarray: - return np.round(super()._get_value()).astype(int) + return np.round(super()._get_value()).astype(int) # type: ignore From 0e94d226dd3089559179456d5c6598d59e1ba75c Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 16:02:45 +0100 Subject: [PATCH 26/39] fix --- nevergrad/parametrization/_layering.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/nevergrad/parametrization/_layering.py b/nevergrad/parametrization/_layering.py index cf7d3a706..3af490f0c 100644 --- a/nevergrad/parametrization/_layering.py +++ b/nevergrad/parametrization/_layering.py @@ -162,7 +162,7 @@ def _get_value(self) -> float: out = super()._get_value() # pulls from previous layer if not isinstance(out, np.ndarray) or not out.size == 1: raise errors.NevergradRuntimeError("Scalar casting can only be applied to size=1 Data parameters") - integer = np.issubdtype(out.dtype, np.int64) + integer = np.issubdtype(out.dtype, np.integer) out = (int if integer else float)(out[0]) return out # type: ignore @@ -179,3 +179,12 @@ class IntegerCasting(Layered): def _get_value(self) -> np.ndarray: return np.round(super()._get_value()).astype(int) # type: ignore + + +class Modulo(Layered): + """Cast Data as integer (or integer array)""" + + _LAYER_LEVEL = 25 + + def _get_value(self) -> np.ndarray: + return np.round(super()._get_value()).astype(int) 
# type: ignore From fe938312939fa3eae8c4555a2effb508571b45bb Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 16:28:30 +0100 Subject: [PATCH 27/39] module --- nevergrad/parametrization/_layering.py | 16 +++++++++++++--- nevergrad/parametrization/data.py | 5 +++++ nevergrad/parametrization/test_parameter.py | 9 +++++++++ 3 files changed, 27 insertions(+), 3 deletions(-) diff --git a/nevergrad/parametrization/_layering.py b/nevergrad/parametrization/_layering.py index 3af490f0c..eb30a3f5b 100644 --- a/nevergrad/parametrization/_layering.py +++ b/nevergrad/parametrization/_layering.py @@ -22,7 +22,7 @@ class Layered: 0: root 1-10: bounds 11-20: casting - 21-30: casting + 21-30: constraints """ _LAYER_LEVEL = 1.0 @@ -184,7 +184,17 @@ def _get_value(self) -> np.ndarray: class Modulo(Layered): """Cast Data as integer (or integer array)""" - _LAYER_LEVEL = 25 + _LAYER_LEVEL = 4 + + def __init__(self, module: tp.Any) -> None: + super().__init__() + if not isinstance(module, (np.ndarray, np.float, np.int, float, int)): + raise TypeError(f"Unsupported type {type(module)} for module") + self._module = module def _get_value(self) -> np.ndarray: - return np.round(super()._get_value()).astype(int) # type: ignore + return super()._get_value() % self._module # type: ignore + + def _set_value(self, value: np.ndarray) -> None: + current = super()._get_value() + super()._set_value(current - (current % self._module) + value) diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index c61c173ea..635f0396f 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -421,6 +421,11 @@ def _set_value(self, value: tp.ArrayLike) -> None: def _get_value(self) -> np.ndarray: return self._value + def __mod__(self: D, other: tp.Any) -> D: + new = self.copy() + new.add_layer(_layering.Modulo(other)) + return new + class Array(Data): diff --git a/nevergrad/parametrization/test_parameter.py 
b/nevergrad/parametrization/test_parameter.py index 38e0c8397..67d29e48b 100644 --- a/nevergrad/parametrization/test_parameter.py +++ b/nevergrad/parametrization/test_parameter.py @@ -402,3 +402,12 @@ def test_array_sampling(method: str, exponent: tp.Optional[float], sigma: float) assert np.any(np.abs(val) > 10) assert np.all(val <= mbound) assert np.all(val >= 1) + + +def test_scalar_module() -> None: + ref = par.Scalar() + x = par.Scalar(10) % 4 + assert x.value == 2 + assert x.get_standardized_data(reference=ref)[0] == 10 + x.value = 1 + assert x.get_standardized_data(reference=ref)[0] == 9 # find the closest From b121776e077525cef47680efb509e260627d8f7d Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 16:44:38 +0100 Subject: [PATCH 28/39] enum --- nevergrad/parametrization/_layering.py | 23 ++++++++++++++++------- nevergrad/parametrization/core.py | 3 ++- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/nevergrad/parametrization/_layering.py b/nevergrad/parametrization/_layering.py index eb30a3f5b..7122a476a 100644 --- a/nevergrad/parametrization/_layering.py +++ b/nevergrad/parametrization/_layering.py @@ -5,6 +5,7 @@ import copy import bisect +from enum import Enum import numpy as np from nevergrad.common import errors import nevergrad.common.typing as tp @@ -14,6 +15,13 @@ X = tp.TypeVar("X") +class Level(Enum): + ROOT = 0.0 + OPERATION = 10.0 + CASTING = 90.0 + FINAL_CASTING = 100.0 + + class Layered: """Hidden API for overriding/modifying the behavior of a Parameter, which is itself a Layered object. 
@@ -25,7 +33,7 @@ class Layered: 21-30: constraints """ - _LAYER_LEVEL = 1.0 + _LAYER_LEVEL = Level.OPERATION def __init__(self) -> None: self._layers = [self] @@ -59,15 +67,16 @@ def _del_value(self) -> tp.Any: def add_layer(self: L, other: "Layered") -> L: """Adds a layer which will modify the object behavior""" - if self is not self._layers[0] or self._LAYER_LEVEL: + if self is not self._layers[0] or self._LAYER_LEVEL != Level.ROOT: raise errors.NevergradRuntimeError("Layers can only be added from the root.") if len(other._layers) > 1: raise errors.NevergradRuntimeError("Cannot append multiple layers at once") - if other._LAYER_LEVEL >= self._layers[-1]._LAYER_LEVEL: + if other._LAYER_LEVEL.value >= self._layers[-1]._LAYER_LEVEL.value: other._index = len(self._layers) self._layers.append(other) else: - ind = bisect.bisect_right([x._LAYER_LEVEL for x in self._layers], other._LAYER_LEVEL) + levels = [x._LAYER_LEVEL.value for x in self._layers] + ind = bisect.bisect_right(levels, other._LAYER_LEVEL.value) self._layers.insert(ind, other) for k, x in enumerate(self._layers): x._index = k @@ -156,7 +165,7 @@ def __delete__(self, obj: Layered) -> None: class _ScalarCasting(Layered): """Cast Array as a scalar""" - _LAYER_LEVEL = 20 # last layer + _LAYER_LEVEL = Level.FINAL_CASTING # last layer def _get_value(self) -> float: out = super()._get_value() # pulls from previous layer @@ -175,7 +184,7 @@ def _set_value(self, value: tp.Any) -> None: class IntegerCasting(Layered): """Cast Data as integer (or integer array)""" - _LAYER_LEVEL = 15 + _LAYER_LEVEL = Level.CASTING def _get_value(self) -> np.ndarray: return np.round(super()._get_value()).astype(int) # type: ignore @@ -184,7 +193,7 @@ def _get_value(self) -> np.ndarray: class Modulo(Layered): """Cast Data as integer (or integer array)""" - _LAYER_LEVEL = 4 + _LAYER_LEVEL = Level.OPERATION def __init__(self, module: tp.Any) -> None: super().__init__() diff --git a/nevergrad/parametrization/core.py 
b/nevergrad/parametrization/core.py index 58b75d290..bacb5ee9d 100644 --- a/nevergrad/parametrization/core.py +++ b/nevergrad/parametrization/core.py @@ -11,6 +11,7 @@ from . import utils from ._layering import ValueProperty as ValueProperty from ._layering import Layered as Layered +from ._layering import Level P = tp.TypeVar("P", bound="Parameter") @@ -34,7 +35,7 @@ class Parameter(Layered): # sub-parameters. # Spawning a child creates a shallow copy. - _LAYER_LEVEL = 0.0 + _LAYER_LEVEL = Level.ROOT value: ValueProperty[tp.Any] = ValueProperty() def __init__(self) -> None: From 2295e10aa65c97709cc02cb61fd5b1d315224419 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 17:02:59 +0100 Subject: [PATCH 29/39] Add array casting --- nevergrad/parametrization/_layering.py | 30 ++++++++++++++++++-------- nevergrad/parametrization/data.py | 7 ++---- 2 files changed, 23 insertions(+), 14 deletions(-) diff --git a/nevergrad/parametrization/_layering.py b/nevergrad/parametrization/_layering.py index 7122a476a..d410c8f42 100644 --- a/nevergrad/parametrization/_layering.py +++ b/nevergrad/parametrization/_layering.py @@ -16,10 +16,15 @@ class Level(Enum): - ROOT = 0.0 - OPERATION = 10.0 - CASTING = 90.0 - FINAL_CASTING = 100.0 + """Lower level is deeper in the structure""" + + ROOT = 0 + OPERATION = 10 + CASTING = 90 + + # final + ARRAY_CASTING = 900 + INTEGER_CASTING = 1000 # must be the last layer class Layered: @@ -27,10 +32,6 @@ class Layered: which is itself a Layered object. 
Layers can be added and will be ordered depending on their level - 0: root - 1-10: bounds - 11-20: casting - 21-30: constraints """ _LAYER_LEVEL = Level.OPERATION @@ -165,7 +166,7 @@ def __delete__(self, obj: Layered) -> None: class _ScalarCasting(Layered): """Cast Array as a scalar""" - _LAYER_LEVEL = Level.FINAL_CASTING # last layer + _LAYER_LEVEL = Level.INTEGER_CASTING def _get_value(self) -> float: out = super()._get_value() # pulls from previous layer @@ -181,6 +182,17 @@ def _set_value(self, value: tp.Any) -> None: super()._set_value(np.array([value], dtype=float)) +class ArrayCasting(Layered): + """Cast inputs of type tuple/list etc to array""" + + _LAYER_LEVEL = Level.ARRAY_CASTING + + def _set_value(self, value: tp.ArrayLike) -> None: + if not isinstance(value, (np.ndarray, tuple, list)): + raise TypeError(f"Received a {type(value)} in place of a np.ndarray/tuple/list") + super()._set_value(np.asarray(value)) + + class IntegerCasting(Layered): """Cast Data as integer (or integer array)""" diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index 635f0396f..117d1e4c1 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -121,6 +121,7 @@ def __init__( self.bound_transform: tp.Optional[trans.BoundTransform] = None self.full_range_sampling = False self._ref_data: tp.Optional[np.ndarray] = None + self.add_layer(_layering.ArrayCasting()) def _compute_descriptors(self) -> utils.Descriptors: return utils.Descriptors(continuous=not self.integer) @@ -401,13 +402,9 @@ def spawn_child(self: D, new_value: tp.Optional[tp.Any] = None) -> D: child.value = new_value return child - def _set_value(self, value: tp.ArrayLike) -> None: + def _set_value(self, value: np.ndarray) -> None: self._check_frozen() self._ref_data = None - if not isinstance(value, (np.ndarray, tuple, list)): - raise TypeError(f"Received a {type(value)} in place of a np.ndarray/tuple/list") - value = np.asarray(value) - assert 
isinstance(value, np.ndarray) if self._value.shape != value.shape: raise ValueError( f"Cannot set array of shape {self._value.shape} with value of shape {value.shape}" From 89ec03f694f10505134b6f99e7713d285b697858 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 18:05:17 +0100 Subject: [PATCH 30/39] Start cache deletion --- nevergrad/parametrization/core.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/nevergrad/parametrization/core.py b/nevergrad/parametrization/core.py index bacb5ee9d..66413dc24 100644 --- a/nevergrad/parametrization/core.py +++ b/nevergrad/parametrization/core.py @@ -201,6 +201,7 @@ def set_standardized_data( sent_reference, self.__class__ ), f"Expected {type(self)} but got {type(sent_reference)} as reference" self._check_frozen() + del self.value # remove all cached information self._internal_set_standardized_data( np.array(data, copy=False), reference=sent_reference, deterministic=deterministic ) @@ -227,7 +228,7 @@ def get_value_hash(self) -> tp.Hashable: if isinstance(val, (str, bytes, float, int)): return val elif isinstance(val, np.ndarray): - return val.tobytes() # type: ignore + return val.tobytes() else: raise errors.UnsupportedParameterOperationError( f"Value hash is not supported for object {self.name}" @@ -343,6 +344,7 @@ def spawn_child(self: P, new_value: tp.Optional[tp.Any] = None) -> P: setattr(child, attribute, container) for key, val in self._subobjects.items(): container[key] = val.spawn_child() + del child.value # clear cache if new_value is not None: child.value = new_value return child From 6e82ce24345efb63cc96ca5ae9c42b91a450fc3e Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 18:41:06 +0100 Subject: [PATCH 31/39] deep --- nevergrad/parametrization/_layering.py | 57 ++++++++++++++------------ nevergrad/parametrization/core.py | 3 ++ 2 files changed, 33 insertions(+), 27 deletions(-) diff --git a/nevergrad/parametrization/_layering.py 
b/nevergrad/parametrization/_layering.py index d410c8f42..bd0531385 100644 --- a/nevergrad/parametrization/_layering.py +++ b/nevergrad/parametrization/_layering.py @@ -20,7 +20,6 @@ class Level(Enum): ROOT = 0 OPERATION = 10 - CASTING = 90 # final ARRAY_CASTING = 900 @@ -41,31 +40,6 @@ def __init__(self) -> None: self._index = 0 self._name: tp.Optional[str] = None - def _get_layer_index(self) -> int: - if self._layers[self._index] is not self: - layers = [f"{l.name}({l._index})" for l in self._layers] - raise errors.NevergradRuntimeError( - "Layer indexing has changed for an unknown reason. Please open an issue:\n" - f"Caller at index {self._index}: {self.name}" - f"Layers: {layers}.\n" - ) - return self._index - - def _get_value(self) -> tp.Any: - index = self._get_layer_index() - if not index: # root must have an implementation - raise NotImplementedError - return self._layers[index - 1]._get_value() - - def _set_value(self, value: tp.Any) -> tp.Any: - index = self._get_layer_index() - if not index: # root must have an implementation - raise NotImplementedError - self._layers[index - 1]._set_value(value) - - def _del_value(self) -> tp.Any: - pass - def add_layer(self: L, other: "Layered") -> L: """Adds a layer which will modify the object behavior""" if self is not self._layers[0] or self._LAYER_LEVEL != Level.ROOT: @@ -84,6 +58,27 @@ def add_layer(self: L, other: "Layered") -> L: other._layers = self._layers return self + def _call_deeper(self, name: str, *args: tp.Any, **kwargs: tp.Any) -> tp.Any: + if self._layers[self._index] is not self: + layers = [f"{l.name}({l._index})" for l in self._layers] + raise errors.NevergradRuntimeError( + "Layer indexing has changed for an unknown reason. 
Please open an issue:\n" + f"Caller at index {self._index}: {self.name}" + f"Layers: {layers}.\n" + ) + if not self._index: # root must have an implementation + raise NotImplementedError + return getattr(self._layers[self._index - 1], name)(*args, **kwargs) + + def _get_value(self) -> tp.Any: + return self._call_deeper("_get_value") + + def _set_value(self, value: tp.Any) -> tp.Any: + self._call_deeper("_set_value", value) + + def _del_value(self) -> None: + self._call_deeper("_del_value") + def copy(self: L) -> L: """Creates a new unattached layer with the same behavior""" new = copy.copy(self) @@ -91,6 +86,14 @@ def copy(self: L) -> L: new._index = 0 return new + def sample(self: L) -> L: + """Sample a new instance of the parameter. + This usually means spawning a child and mutating it. + This function should be used in optimizers when creating an initial population, + and parameter.heritage["lineage"] is reset to parameter.uid instead of its parent's + """ + return self._call_deeper("sample") + # naming capacity def _get_name(self) -> str: @@ -196,7 +199,7 @@ def _set_value(self, value: tp.ArrayLike) -> None: class IntegerCasting(Layered): """Cast Data as integer (or integer array)""" - _LAYER_LEVEL = Level.CASTING + _LAYER_LEVEL = Level.OPERATION def _get_value(self) -> np.ndarray: return np.round(super()._get_value()).astype(int) # type: ignore diff --git a/nevergrad/parametrization/core.py b/nevergrad/parametrization/core.py index 66413dc24..b28287c4f 100644 --- a/nevergrad/parametrization/core.py +++ b/nevergrad/parametrization/core.py @@ -104,6 +104,9 @@ def dimension(self) -> int: self._dimension = 0 return self._dimension + def _del_value(self) -> None: + pass # used to remove cache, which Parameters should not have + def mutate(self) -> None: """Mutate parameters of the instance, and then its value""" self._check_frozen() From 4e0ac9593d4af6f86c799b4b1c3ef43730ea2a84 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Fri, 12 Feb 2021 19:09:32 +0100 
Subject: [PATCH 32/39] bound_layer --- nevergrad/parametrization/_layering.py | 21 +------- nevergrad/parametrization/data.py | 74 +++++++++++++++++++++++++- 2 files changed, 74 insertions(+), 21 deletions(-) diff --git a/nevergrad/parametrization/_layering.py b/nevergrad/parametrization/_layering.py index bd0531385..3dfe2e4b0 100644 --- a/nevergrad/parametrization/_layering.py +++ b/nevergrad/parametrization/_layering.py @@ -92,7 +92,7 @@ def sample(self: L) -> L: This function should be used in optimizers when creating an initial population, and parameter.heritage["lineage"] is reset to parameter.uid instead of its parent's """ - return self._call_deeper("sample") + return self._call_deeper("sample") # type: ignore # naming capacity @@ -203,22 +203,3 @@ class IntegerCasting(Layered): def _get_value(self) -> np.ndarray: return np.round(super()._get_value()).astype(int) # type: ignore - - -class Modulo(Layered): - """Cast Data as integer (or integer array)""" - - _LAYER_LEVEL = Level.OPERATION - - def __init__(self, module: tp.Any) -> None: - super().__init__() - if not isinstance(module, (np.ndarray, np.float, np.int, float, int)): - raise TypeError(f"Unsupported type {type(module)} for module") - self._module = module - - def _get_value(self) -> np.ndarray: - return super()._get_value() % self._module # type: ignore - - def _set_value(self, value: np.ndarray) -> None: - current = super()._get_value() - super()._set_value(current - (current % self._module) + value) diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index 117d1e4c1..eb642a123 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -7,6 +7,7 @@ import warnings import numpy as np import nevergrad.common.typing as tp +from nevergrad.common import errors from . import _layering from . 
import core from .container import Dict @@ -420,7 +421,7 @@ def _get_value(self) -> np.ndarray: def __mod__(self: D, other: tp.Any) -> D: new = self.copy() - new.add_layer(_layering.Modulo(other)) + new.add_layer(Modulo(other)) return new @@ -526,3 +527,74 @@ def __init__( self.set_mutation(sigma=1.0, exponent=exponent) if any(a is not None for a in (lower, upper)): self.set_bounds(lower, upper, full_range_sampling=bounded and no_init) + + +# LAYERS + + +class BoundLayer(_layering.Layered): + + _LAYER_LEVEL = _layering.Level.OPERATION + + def __init__( + self, + lower: tp.BoundValue = None, + upper: tp.BoundValue = None, + full_range_sampling: tp.Optional[bool] = None, + ) -> None: + """Bounds all real values into [lower, upper] + + Parameters + ---------- + lower: float or None + minimum value + upper: float or None + maximum value + method: str + One of the following choices: + full_range_sampling: Optional bool + Changes the default behavior of the "sample" method (aka creating a child and mutating it from the current instance) + or the sampling optimizers, to creating a child with a value sampled uniformly (or log-uniformly) within + the while range of the bounds. The "sample" method is used by some algorithms to create an initial population. + This is activated by default if both bounds are provided. 
+ """ # TODO improve description of methods + super().__init__() + self.bounds = tuple( + a if isinstance(a, np.ndarray) or a is None else np.array([a], dtype=float) + for a in (lower, upper) + ) + both_bounds = all(b is not None for b in self.bounds) + self.full_range_sampling = full_range_sampling + if full_range_sampling is None: + self.full_range_sampling = both_bounds + + def sample(self) -> D: + if not self.full_range_sampling: + super().sample() + root = self._layers[0] + if not isinstance(root, Data): + raise errors.NevergradTypeError(f"BoundLayer {self} on a non-Data root {root}") + child = root.spawn_child() + shape = super()._get_value().shape + bounds = tuple(b * np.ones(shape) for b in self.bounds) + diff = bounds[1] - bounds[0] + super().set_value(bounds[0] + root.random_state.uniform(0, 1, size=shape) * diff) + child.heritage["lineage"] = child.uid + return child + + +class Modulo(BoundLayer): + """Cast Data as integer (or integer array)""" + + def __init__(self, module: tp.Any) -> None: + super().__init__(lower=0, upper=module) + if not isinstance(module, (np.ndarray, np.float, np.int, float, int)): + raise TypeError(f"Unsupported type {type(module)} for module") + self._module = module + + def _get_value(self) -> np.ndarray: + return super()._get_value() % self._module # type: ignore + + def _set_value(self, value: np.ndarray) -> None: + current = super()._get_value() + super()._set_value(current - (current % self._module) + value) From 58883f3716fb7af027e6c420b843fb305bb52fb7 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Sun, 14 Feb 2021 15:43:40 +0100 Subject: [PATCH 33/39] wip --- nevergrad/common/errors.py | 4 ++++ nevergrad/parametrization/data.py | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/nevergrad/common/errors.py b/nevergrad/common/errors.py index ea7b53a92..be3f03f38 100644 --- a/nevergrad/common/errors.py +++ b/nevergrad/common/errors.py @@ -24,6 +24,10 @@ class NevergradRuntimeError(RuntimeError, 
NevergradError): """Runtime error raised by Nevergrad""" +class NevergradTypeError(TypeError, NevergradError): + """Runtime error raised by Nevergrad""" + + class TellNotAskedNotSupportedError(NotImplementedError, NevergradError): """To be raised by optimizers which do not support the tell_not_asked interface.""" diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index eb642a123..f52671447 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -568,7 +568,7 @@ def __init__( if full_range_sampling is None: self.full_range_sampling = both_bounds - def sample(self) -> D: + def sample(self: D) -> D: if not self.full_range_sampling: super().sample() root = self._layers[0] @@ -578,7 +578,7 @@ def sample(self) -> D: shape = super()._get_value().shape bounds = tuple(b * np.ones(shape) for b in self.bounds) diff = bounds[1] - bounds[0] - super().set_value(bounds[0] + root.random_state.uniform(0, 1, size=shape) * diff) + super()._set_value(bounds[0] + root.random_state.uniform(0, 1, size=shape) * diff) child.heritage["lineage"] = child.uid return child From 1f5bc65ee2eb8c236cfca0e68bad35d7f28e4b51 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Sun, 14 Feb 2021 16:46:17 +0100 Subject: [PATCH 34/39] skip --- nevergrad/common/errors.py | 14 +++++-- nevergrad/parametrization/_layering.py | 52 +++++++++++++++----------- nevergrad/parametrization/choice.py | 12 +++--- nevergrad/parametrization/container.py | 8 ++-- nevergrad/parametrization/core.py | 6 +-- nevergrad/parametrization/data.py | 49 ++++++++++++------------ 6 files changed, 80 insertions(+), 61 deletions(-) diff --git a/nevergrad/common/errors.py b/nevergrad/common/errors.py index be3f03f38..1741046ee 100644 --- a/nevergrad/common/errors.py +++ b/nevergrad/common/errors.py @@ -28,15 +28,23 @@ class NevergradTypeError(TypeError, NevergradError): """Runtime error raised by Nevergrad""" -class TellNotAskedNotSupportedError(NotImplementedError, 
NevergradError): +class NevergradValueError(ValueError, NevergradError): + """Runtime error raised by Nevergrad""" + + +class NevergradNotImplementedError(NotImplementedError, NevergradError): + """Not implemented functionality""" + + +class TellNotAskedNotSupportedError(NevergradNotImplementedError): """To be raised by optimizers which do not support the tell_not_asked interface.""" -class ExperimentFunctionCopyError(NotImplementedError, NevergradError): +class ExperimentFunctionCopyError(NevergradNotImplementedError): """Raised when the experiment function fails to copy itself (for benchmarks)""" -class UnsupportedExperiment(RuntimeError, unittest.SkipTest, NevergradError): +class UnsupportedExperiment(unittest.SkipTest, NevergradRuntimeError): """Raised if the experiment is not compatible with the current settings: Eg: missing data, missing import, unsupported OS etc This automatically skips tests. diff --git a/nevergrad/parametrization/_layering.py b/nevergrad/parametrization/_layering.py index 3dfe2e4b0..48a47cc86 100644 --- a/nevergrad/parametrization/_layering.py +++ b/nevergrad/parametrization/_layering.py @@ -59,6 +59,8 @@ def add_layer(self: L, other: "Layered") -> L: return self def _call_deeper(self, name: str, *args: tp.Any, **kwargs: tp.Any) -> tp.Any: + if not name.startswith("_layered_"): + raise errors.NevergradValueError("For consistency, only _layered functions can be used.") if self._layers[self._index] is not self: layers = [f"{l.name}({l._index})" for l in self._layers] raise errors.NevergradRuntimeError( @@ -66,18 +68,24 @@ def _call_deeper(self, name: str, *args: tp.Any, **kwargs: tp.Any) -> tp.Any: f"Caller at index {self._index}: {self.name}" f"Layers: {layers}.\n" ) - if not self._index: # root must have an implementation - raise NotImplementedError - return getattr(self._layers[self._index - 1], name)(*args, **kwargs) + for index in reversed(range(self._index)): + func = getattr(self._layers[index], name) + if func.__func__ is not 
getattr(Layered, name): # skip unecessary stack calls + return func(*args, **kwargs) + raise errors.NevergradNotImplementedError(f"No implementation for {name}") + # alternative (stacking all calls): + # if not self._index: # root must have an implementation + # raise errors.NevergradNotImplementedError + # return getattr(self._layers[self._index - 1], name)(*args, **kwargs) - def _get_value(self) -> tp.Any: - return self._call_deeper("_get_value") + def _layered_get_value(self) -> tp.Any: + return self._call_deeper("_layered_get_value") - def _set_value(self, value: tp.Any) -> tp.Any: - self._call_deeper("_set_value", value) + def _layered_set_value(self, value: tp.Any) -> tp.Any: + return self._call_deeper("_layered_set_value", value) - def _del_value(self) -> None: - self._call_deeper("_del_value") + def _layered_del_value(self) -> None: + self._call_deeper("_layered_del_value") def copy(self: L) -> L: """Creates a new unattached layer with the same behavior""" @@ -134,14 +142,14 @@ def set_name(self: L, name: str) -> L: class ValueProperty(tp.Generic[X]): """Typed property (descriptor) object so that the value attribute of - Parameter objects fetches _get_value and _set_value methods + Parameter objects fetches _layered_get_value and _layered_set_value methods """ # This uses the descriptor protocol, like a property: # See https://docs.python.org/3/howto/descriptor.html # # Basically parameter.value calls parameter.value.__get__ - # and then parameter._get_value + # and then parameter._layered_get_value def __init__(self) -> None: self.__doc__ = """Value of the Parameter, which should be sent to the function to optimize. 
@@ -153,14 +161,14 @@ def __init__(self) -> None: """ def __get__(self, obj: Layered, objtype: tp.Optional[tp.Type[object]] = None) -> X: - return obj._layers[-1]._get_value() # type: ignore + return obj._layers[-1]._layered_get_value() # type: ignore def __set__(self, obj: Layered, value: X) -> None: - obj._layers[-1]._set_value(value) + obj._layers[-1]._layered_set_value(value) def __delete__(self, obj: Layered) -> None: for layer in obj._layers: - layer._del_value() + layer._layered_del_value() # Basic data layers @@ -171,18 +179,18 @@ class _ScalarCasting(Layered): _LAYER_LEVEL = Level.INTEGER_CASTING - def _get_value(self) -> float: - out = super()._get_value() # pulls from previous layer + def _layered_get_value(self) -> float: + out = super()._layered_get_value() # pulls from previous layer if not isinstance(out, np.ndarray) or not out.size == 1: raise errors.NevergradRuntimeError("Scalar casting can only be applied to size=1 Data parameters") integer = np.issubdtype(out.dtype, np.integer) out = (int if integer else float)(out[0]) return out # type: ignore - def _set_value(self, value: tp.Any) -> None: + def _layered_set_value(self, value: tp.Any) -> None: if not isinstance(value, (float, int, np.float, np.int)): raise TypeError(f"Received a {type(value)} in place of a scalar (float, int)") - super()._set_value(np.array([value], dtype=float)) + super()._layered_set_value(np.array([value], dtype=float)) class ArrayCasting(Layered): @@ -190,10 +198,10 @@ class ArrayCasting(Layered): _LAYER_LEVEL = Level.ARRAY_CASTING - def _set_value(self, value: tp.ArrayLike) -> None: + def _layered_set_value(self, value: tp.ArrayLike) -> None: if not isinstance(value, (np.ndarray, tuple, list)): raise TypeError(f"Received a {type(value)} in place of a np.ndarray/tuple/list") - super()._set_value(np.asarray(value)) + super()._layered_set_value(np.asarray(value)) class IntegerCasting(Layered): @@ -201,5 +209,5 @@ class IntegerCasting(Layered): _LAYER_LEVEL = Level.OPERATION - 
def _get_value(self) -> np.ndarray: - return np.round(super()._get_value()).astype(int) # type: ignore + def _layered_get_value(self) -> np.ndarray: + return np.round(super()._layered_get_value()).astype(int) # type: ignore diff --git a/nevergrad/parametrization/choice.py b/nevergrad/parametrization/choice.py index d523ea932..dd400b281 100644 --- a/nevergrad/parametrization/choice.py +++ b/nevergrad/parametrization/choice.py @@ -84,12 +84,12 @@ def choices(self) -> container.Tuple: """The different options, as a Tuple Parameter""" return self["choices"] # type: ignore - def _get_value(self) -> tp.Any: + def _layered_get_value(self) -> tp.Any: if self._repetitions is None: return core.as_parameter(self.choices[self.index]).value return tuple(core.as_parameter(self.choices[ind]).value for ind in self.indices) - def _set_value(self, value: tp.List[tp.Any]) -> np.ndarray: + def _layered_set_value(self, value: tp.List[tp.Any]) -> np.ndarray: """Must be adapted to each class This handles a list of values, not just one """ # TODO this is currenlty very messy, may need some improvement @@ -197,8 +197,8 @@ def probabilities(self) -> np.ndarray: exp = np.exp(self.weights.value) return exp / np.sum(exp) # type: ignore - def _set_value(self, value: tp.Any) -> np.ndarray: - indices = super()._set_value(value) + def _layered_set_value(self, value: tp.Any) -> np.ndarray: + indices = super()._layered_set_value(value) self._indices = indices # force new probabilities arity = self.weights.value.shape[1] @@ -274,8 +274,8 @@ def __init__( def indices(self) -> np.ndarray: return np.minimum(len(self) - 1e-9, self.positions.value).astype(int) # type: ignore - def _set_value(self, value: tp.Any) -> np.ndarray: - indices = super()._set_value(value) # only one value for this class + def _layered_set_value(self, value: tp.Any) -> np.ndarray: + indices = super()._layered_set_value(value) # only one value for this class self._set_index(indices) return indices diff --git 
a/nevergrad/parametrization/container.py b/nevergrad/parametrization/container.py index c2e297669..6d923e84b 100644 --- a/nevergrad/parametrization/container.py +++ b/nevergrad/parametrization/container.py @@ -139,10 +139,10 @@ def items(self) -> tp.ItemsView[str, core.Parameter]: def values(self) -> tp.ValuesView[core.Parameter]: return self._content.values() - def _get_value(self) -> tp.Dict[str, tp.Any]: + def _layered_get_value(self) -> tp.Dict[str, tp.Any]: return {k: p.value for k, p in self.items()} - def _set_value(self, value: tp.Dict[str, tp.Any]) -> None: + def _layered_set_value(self, value: tp.Dict[str, tp.Any]) -> None: cls = self.__class__.__name__ if not isinstance(value, dict): raise TypeError(f"{cls} value must be a dict, got: {value}\nCurrent value: {self.value}") @@ -190,10 +190,10 @@ def __iter__(self) -> tp.Iterator[core.Parameter]: value: core.ValueProperty[tp.Tuple[tp.Any]] = core.ValueProperty() - def _get_value(self) -> tp.Tuple[tp.Any, ...]: + def _layered_get_value(self) -> tp.Tuple[tp.Any, ...]: return tuple(p.value for p in self) - def _set_value(self, value: tp.Tuple[tp.Any, ...]) -> None: + def _layered_set_value(self, value: tp.Tuple[tp.Any, ...]) -> None: if not isinstance(value, tuple) or not len(value) == len(self): cls = self.__class__.__name__ raise ValueError( diff --git a/nevergrad/parametrization/core.py b/nevergrad/parametrization/core.py index b28287c4f..c5b6c602f 100644 --- a/nevergrad/parametrization/core.py +++ b/nevergrad/parametrization/core.py @@ -104,7 +104,7 @@ def dimension(self) -> int: self._dimension = 0 return self._dimension - def _del_value(self) -> None: + def _layered_del_value(self) -> None: pass # used to remove cache, which Parameters should not have def mutate(self) -> None: @@ -417,10 +417,10 @@ def get_value_hash(self) -> tp.Hashable: except errors.UnsupportedParameterOperationError: return "#non-hashable-constant#" - def _get_value(self) -> tp.Any: + def _layered_get_value(self) -> tp.Any: return 
self._value - def _set_value(self, value: tp.Any) -> None: + def _layered_set_value(self, value: tp.Any) -> None: different = False if isinstance(value, np.ndarray): if not np.equal(value, self._value).all(): diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index f52671447..c4d2c8407 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -7,7 +7,8 @@ import warnings import numpy as np import nevergrad.common.typing as tp -from nevergrad.common import errors + +# from nevergrad.common import errors from . import _layering from . import core from .container import Dict @@ -20,6 +21,8 @@ D = tp.TypeVar("D", bound="Data") P = tp.TypeVar("P", bound=core.Parameter) +# L = tp.TypeVar("L", bound=_layering.Layered) +BL = tp.TypeVar("BL", bound="BoundLayer") def _param_string(parameters: Dict) -> str: @@ -52,10 +55,10 @@ def __init__(self, **kwargs: tp.Any) -> None: super().__init__() self.parameters = Dict(**kwargs) - def _get_value(self) -> tp.Callable[[tp.Sequence[D]], None]: + def _layered_get_value(self) -> tp.Callable[[tp.Sequence[D]], None]: return self.apply - def _set_value(self, value: tp.Any) -> None: + def _layered_set_value(self, value: tp.Any) -> None: raise RuntimeError("Mutation cannot be set.") def _get_name(self) -> str: @@ -403,7 +406,7 @@ def spawn_child(self: D, new_value: tp.Optional[tp.Any] = None) -> D: child.value = new_value return child - def _set_value(self, value: np.ndarray) -> None: + def _layered_set_value(self, value: np.ndarray) -> None: self._check_frozen() self._ref_data = None if self._value.shape != value.shape: @@ -416,7 +419,7 @@ def _set_value(self, value: np.ndarray) -> None: raise ValueError("Logirithmic values cannot be negative") self._value = value - def _get_value(self) -> np.ndarray: + def _layered_get_value(self) -> np.ndarray: return self._value def __mod__(self: D, other: tp.Any) -> D: @@ -568,19 +571,19 @@ def __init__( if full_range_sampling is None: 
self.full_range_sampling = both_bounds - def sample(self: D) -> D: - if not self.full_range_sampling: - super().sample() - root = self._layers[0] - if not isinstance(root, Data): - raise errors.NevergradTypeError(f"BoundLayer {self} on a non-Data root {root}") - child = root.spawn_child() - shape = super()._get_value().shape - bounds = tuple(b * np.ones(shape) for b in self.bounds) - diff = bounds[1] - bounds[0] - super()._set_value(bounds[0] + root.random_state.uniform(0, 1, size=shape) * diff) - child.heritage["lineage"] = child.uid - return child + # def sample(self) -> "Data": + # if not self.full_range_sampling: + # super().sample() + # root = self._layers[0] + # if not isinstance(root, Data): + # raise errors.NevergradTypeError(f"BoundLayer {self} on a non-Data root {root}") + # child = root.spawn_child() + # shape = super()._layered_get_value().shape + # bounds = tuple(b * np.ones(shape) for b in self.bounds) + # diff = bounds[1] - bounds[0] + # super()._layered_set_value(bounds[0] + root.random_state.uniform(0, 1, size=shape) * diff) + # child.heritage["lineage"] = child.uid + # return child class Modulo(BoundLayer): @@ -592,9 +595,9 @@ def __init__(self, module: tp.Any) -> None: raise TypeError(f"Unsupported type {type(module)} for module") self._module = module - def _get_value(self) -> np.ndarray: - return super()._get_value() % self._module # type: ignore + def _layered_get_value(self) -> np.ndarray: + return super()._layered_get_value() % self._module # type: ignore - def _set_value(self, value: np.ndarray) -> None: - current = super()._get_value() - super()._set_value(current - (current % self._module) + value) + def _layered_set_value(self, value: np.ndarray) -> None: + current = super()._layered_get_value() + super()._layered_set_value(current - (current % self._module) + value) From 6d14420fe5461fa34c291889ef1ca4b8860a9c5c Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Sun, 14 Feb 2021 17:08:46 +0100 Subject: [PATCH 35/39] sample --- 
nevergrad/parametrization/_layering.py | 16 ++++++---------- nevergrad/parametrization/container.py | 3 +-- nevergrad/parametrization/core.py | 13 +++++++++---- nevergrad/parametrization/data.py | 8 +++++--- 4 files changed, 21 insertions(+), 19 deletions(-) diff --git a/nevergrad/parametrization/_layering.py b/nevergrad/parametrization/_layering.py index 48a47cc86..dbff2d70f 100644 --- a/nevergrad/parametrization/_layering.py +++ b/nevergrad/parametrization/_layering.py @@ -72,8 +72,9 @@ def _call_deeper(self, name: str, *args: tp.Any, **kwargs: tp.Any) -> tp.Any: func = getattr(self._layers[index], name) if func.__func__ is not getattr(Layered, name): # skip unecessary stack calls return func(*args, **kwargs) - raise errors.NevergradNotImplementedError(f"No implementation for {name}") - # alternative (stacking all calls): + types = [type(x) for x in self._layers] + raise errors.NevergradNotImplementedError(f"No implementation for {name} on layers: {types}.") + # ALTERNATIVE (stacking all calls): # if not self._index: # root must have an implementation # raise errors.NevergradNotImplementedError # return getattr(self._layers[self._index - 1], name)(*args, **kwargs) @@ -87,6 +88,9 @@ def _layered_set_value(self, value: tp.Any) -> tp.Any: def _layered_del_value(self) -> None: self._call_deeper("_layered_del_value") + def _layered_sample(self) -> "Layered": + return self._call_deeper("_layered_sample") # type: ignore + def copy(self: L) -> L: """Creates a new unattached layer with the same behavior""" new = copy.copy(self) @@ -94,14 +98,6 @@ def copy(self: L) -> L: new._index = 0 return new - def sample(self: L) -> L: - """Sample a new instance of the parameter. - This usually means spawning a child and mutating it. 
- This function should be used in optimizers when creating an initial population, - and parameter.heritage["lineage"] is reset to parameter.uid instead of its parent's - """ - return self._call_deeper("sample") # type: ignore - # naming capacity def _get_name(self) -> str: diff --git a/nevergrad/parametrization/container.py b/nevergrad/parametrization/container.py index 6d923e84b..dfe5fbfc9 100644 --- a/nevergrad/parametrization/container.py +++ b/nevergrad/parametrization/container.py @@ -101,10 +101,9 @@ def _internal_set_standardized_data( start = end assert end == len(data), f"Finished at {end} but expected {len(data)}" - def sample(self: D) -> D: + def _layered_sample(self: D) -> D: child = self.spawn_child() child._content = {k: p.sample() for k, p in self._content.items()} - child.heritage["lineage"] = child.uid return child diff --git a/nevergrad/parametrization/core.py b/nevergrad/parametrization/core.py index c5b6c602f..5b1389a4b 100644 --- a/nevergrad/parametrization/core.py +++ b/nevergrad/parametrization/core.py @@ -119,10 +119,12 @@ def sample(self: P) -> P: This function should be used in optimizers when creating an initial population, and parameter.heritage["lineage"] is reset to parameter.uid instead of its parent's """ - child = self.spawn_child() - child.mutate() - child.heritage["lineage"] = child.uid - return child + # inner workings can be overridden by _layered_sample() + child = self._layered_sample() + if not isinstance(child, type(self)): + raise errors.NevergradRuntimeError("Unexpected sample return type") + child.heritage["lineage"] = child.uid # type: ignore + return child # type: ignore def recombine(self: P, *others: P) -> None: """Update value and parameters of this instance by combining it with @@ -432,6 +434,9 @@ def _layered_set_value(self, value: tp.Any) -> None: f'Constant value can only be updated to the same value (in this case "{self._value}")' ) + def _layered_sample(self: P) -> P: + return self + def get_standardized_data( # 
pylint: disable=unused-argument self: P, *, reference: tp.Optional[P] = None ) -> np.ndarray: diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index c4d2c8407..bdc506e4d 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -1,4 +1,4 @@ -# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.(an # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. @@ -150,9 +150,11 @@ def sigma(self) -> tp.Union["Array", "Scalar"]: """Value for the standard deviation used to mutate the parameter""" return self.parameters["sigma"] # type: ignore - def sample(self: D) -> D: + def _layered_sample(self: D) -> D: if not self.full_range_sampling: - return super().sample() + child = self.spawn_child() + child.mutate() + return child child = self.spawn_child() func = (lambda x: x) if self.exponent is None else self._to_reduced_space # noqa std_bounds = tuple(func(b * np.ones(self._value.shape)) for b in self.bounds) From 88856f05b536e9b93edb32ad4b371d18fad1d148 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Sun, 14 Feb 2021 17:16:55 +0100 Subject: [PATCH 36/39] wip --- nevergrad/parametrization/data.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index bdc506e4d..e7ec496cf 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -8,7 +8,7 @@ import numpy as np import nevergrad.common.typing as tp -# from nevergrad.common import errors +from nevergrad.common import errors from . import _layering from . 
import core from .container import Dict @@ -534,7 +534,7 @@ def __init__( self.set_bounds(lower, upper, full_range_sampling=bounded and no_init) -# LAYERS +# LAYERS # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # class BoundLayer(_layering.Layered): @@ -573,19 +573,19 @@ def __init__( if full_range_sampling is None: self.full_range_sampling = both_bounds - # def sample(self) -> "Data": - # if not self.full_range_sampling: - # super().sample() - # root = self._layers[0] - # if not isinstance(root, Data): - # raise errors.NevergradTypeError(f"BoundLayer {self} on a non-Data root {root}") - # child = root.spawn_child() - # shape = super()._layered_get_value().shape - # bounds = tuple(b * np.ones(shape) for b in self.bounds) - # diff = bounds[1] - bounds[0] - # super()._layered_set_value(bounds[0] + root.random_state.uniform(0, 1, size=shape) * diff) - # child.heritage["lineage"] = child.uid - # return child + def _layered_sample(self) -> "Data": + if not self.full_range_sampling: + return super()._layered_sample() # type: ignore + root = self._layers[0] + if not isinstance(root, Data): + raise errors.NevergradTypeError(f"BoundLayer {self} on a non-Data root {root}") + child = root.spawn_child() + shape = super()._layered_get_value().shape + bounds = tuple(b * np.ones(shape) for b in self.bounds) + diff = bounds[1] - bounds[0] + super()._layered_set_value(bounds[0] + root.random_state.uniform(0, 1, size=shape) * diff) + child.heritage["lineage"] = child.uid + return child class Modulo(BoundLayer): From 816a25424e37270d500ea612606c852e4b68aba8 Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Mon, 15 Feb 2021 16:33:55 +0100 Subject: [PATCH 37/39] move_logic --- nevergrad/parametrization/_layering.py | 3 +++ nevergrad/parametrization/core.py | 2 -- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/nevergrad/parametrization/_layering.py b/nevergrad/parametrization/_layering.py index dbff2d70f..1051f51fa 
100644 --- a/nevergrad/parametrization/_layering.py +++ b/nevergrad/parametrization/_layering.py @@ -96,6 +96,9 @@ def copy(self: L) -> L: new = copy.copy(self) new._layers = [new] new._index = 0 + if not self._index: # attach sublayers if root + for layer in self._layers[1:]: + new.add_layer(layer.copy()) return new # naming capacity diff --git a/nevergrad/parametrization/core.py b/nevergrad/parametrization/core.py index 3417818f4..78e72dad3 100644 --- a/nevergrad/parametrization/core.py +++ b/nevergrad/parametrization/core.py @@ -352,8 +352,6 @@ def copy(self: P) -> P: # layers if self is not self._layers[0]: raise errors.NevergradRuntimeError("Something has gone horribly wrong with the layers") - for layer in self._layers[1:]: - child.add_layer(layer.copy()) # subparameters attribute = self._subobjects.attribute container = getattr(child, attribute) From 6cb68aedbfa7a7c8702c7554039af6ef29cfbd6e Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Mon, 15 Feb 2021 16:56:40 +0100 Subject: [PATCH 38/39] fix --- nevergrad/parametrization/core.py | 2 +- nevergrad/parametrization/data.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/nevergrad/parametrization/core.py b/nevergrad/parametrization/core.py index 78e72dad3..4c847a709 100644 --- a/nevergrad/parametrization/core.py +++ b/nevergrad/parametrization/core.py @@ -336,7 +336,7 @@ def spawn_child(self: P, new_value: tp.Optional[tp.Any] = None) -> P: return child def copy(self: P) -> P: - """Creates a full copy of the parameter. + """Creates a full copy of the parameter (with new unique uid). Use spawn_child instead to make sure to add the parenthood information. 
""" child = super().copy() diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index 614e366b3..a2308fa9d 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -580,9 +580,9 @@ def _layered_sample(self) -> "Data": child = root.spawn_child() shape = super()._layered_get_value().shape bounds = tuple(b * np.ones(shape) for b in self.bounds) - diff = bounds[1] - bounds[0] - super()._layered_set_value(bounds[0] + root.random_state.uniform(0, 1, size=shape) * diff) - child.heritage["lineage"] = child.uid + new_val = root.random_state.uniform(*bounds) + # send new val to the layer under this one for the child + child._layers[self._index - 1]._layered_set_value(new_val) return child From fb8acf7b57b4d4c7e7f27ff88b464da9ccdaa12e Mon Sep 17 00:00:00 2001 From: Jeremy Rapin Date: Mon, 15 Feb 2021 17:07:37 +0100 Subject: [PATCH 39/39] nits --- CHANGELOG.md | 23 ++++++++++++++++++----- nevergrad/parametrization/_layering.py | 2 +- nevergrad/parametrization/core.py | 3 --- nevergrad/parametrization/data.py | 5 ++++- 4 files changed, 23 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 39ac13f71..c495f83a8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,17 +2,30 @@ ## master +### Breaking changes + +- `copy()` method of a `Parameter` does not change the parameter's random state anymore (it used to reset it to `None`) [#1048](https://github.com/facebookresearch/nevergrad/pull/1048) - `MultiobjectiveFunction` does not exist anymore [#1034](https://github.com/facebookresearch/nevergrad/pull/1034).
-- `EvolutionStrategy` now defaults to NSGA2 selection in the multiobjective case -- `Parameter` classes are undergoing heavy changes ( + +### Important changes + +- `Parameter` classes are undergoing heavy changes, please open an issue if you encounter any problem. + The midterm aim is to allow for simpler constraint management. +- `Parameter` classes have undergone heavy changes to ease the handling of their tree structure ( [#1029](https://github.com/facebookresearch/nevergrad/pull/1029) [#1036](https://github.com/facebookresearch/nevergrad/pull/1036) [#1038](https://github.com/facebookresearch/nevergrad/pull/1038) [#1043](https://github.com/facebookresearch/nevergrad/pull/1043) [#1044](https://github.com/facebookresearch/nevergrad/pull/1044) - and more to come), please open an issue if you encounter any problem. The midterm aim is to allow for simpler constraint management. -- `copy()` method of a `Parameter` does not change the parameters's random state anymore (it used to reset it to `None` [#1048](https://github.com/facebookresearch/nevergrad/pull/1048) + ) +- `Parameter` classes now have a layer structure [#1045](https://github.com/facebookresearch/nevergrad/pull/1045) + which simplifies changing their behavior. In future PRs this system will take charge of bounds, other constraints, + sampling etc. + +### Other changes + 
+- `EvolutionStrategy` now defaults to NSGA2 selection in the multiobjective case ## 0.4.3 (2021-01-28) diff --git a/nevergrad/parametrization/_layering.py b/nevergrad/parametrization/_layering.py index 1051f51fa..127d82b09 100644 --- a/nevergrad/parametrization/_layering.py +++ b/nevergrad/parametrization/_layering.py @@ -86,7 +86,7 @@ def _layered_set_value(self, value: tp.Any) -> tp.Any: return self._call_deeper("_layered_set_value", value) def _layered_del_value(self) -> None: - self._call_deeper("_layered_del_value") + pass # called independently on each layer def _layered_sample(self) -> "Layered": return self._call_deeper("_layered_sample") # type: ignore diff --git a/nevergrad/parametrization/core.py b/nevergrad/parametrization/core.py index 4c847a709..95b10f451 100644 --- a/nevergrad/parametrization/core.py +++ b/nevergrad/parametrization/core.py @@ -107,9 +107,6 @@ def dimension(self) -> int: self._dimension = 0 return self._dimension - def _layered_del_value(self) -> None: - pass # used to remove cache, which Parameters should not have - def mutate(self) -> None: """Mutate parameters of the instance, and then its value""" self._check_frozen() diff --git a/nevergrad/parametrization/data.py b/nevergrad/parametrization/data.py index a2308fa9d..30dc0c624 100644 --- a/nevergrad/parametrization/data.py +++ b/nevergrad/parametrization/data.py @@ -546,6 +546,7 @@ def __init__( full_range_sampling: tp.Optional[bool] = None, ) -> None: """Bounds all real values into [lower, upper] + CAUTION: WIP Parameters ---------- @@ -587,7 +588,9 @@ def _layered_sample(self) -> "Data": class Modulo(BoundLayer): - """Cast Data as integer (or integer array)""" + """Cast Data as integer (or integer array) + CAUTION: WIP + """ def __init__(self, module: tp.Any) -> None: super().__init__(lower=0, upper=module)