From b88578d66ed62ce439a32a851038ce67106ff092 Mon Sep 17 00:00:00 2001
From: Jeremy Rapin
Date: Mon, 22 Feb 2021 12:48:44 +0100
Subject: [PATCH 01/17] [WIP] Add MOO mode for 1+1

---
 nevergrad/optimization/optimizerlib.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/nevergrad/optimization/optimizerlib.py b/nevergrad/optimization/optimizerlib.py
index 0c6835561..a8fc78a2e 100644
--- a/nevergrad/optimization/optimizerlib.py
+++ b/nevergrad/optimization/optimizerlib.py
@@ -69,9 +69,11 @@ def __init__(
         noise_handling: tp.Optional[tp.Union[str, tp.Tuple[str, float]]] = None,
         mutation: str = "gaussian",
         crossover: bool = False,
+        use_pareto: bool = True,
     ) -> None:
         super().__init__(parametrization, budget=budget, num_workers=num_workers)
         self._sigma: float = 1
+        self.use_pareto = use_pareto
         all_params = paramhelpers.flatten_parameter(self.parametrization)
         arity = max(
             len(param.choices) if isinstance(param, p.TransitionChoice) else 500
@@ -153,6 +155,9 @@ def _internal_ask_candidate(self) -> p.Parameter:
             # crossover
             mutator = mutations.Mutator(self._rng)
             pessimistic = self.current_bests["pessimistic"].parameter.spawn_child()
+            if self.num_objectives > 1 and self.use_pareto:  # multiobjective
+                # revert to using a sample of the pareto front (not "pessimistic" though)
+                pessimistic = self._rng.choice(self.pareto_front())
             ref = self.parametrization
             if self.crossover and self._num_ask % 2 == 1 and len(self.archive) > 2:
                 data = mutator.crossover(
@@ -293,6 +298,8 @@ class ParametrizedOnePlusOne(base.ConfiguredOptimizer):
           - `"lengler"`: specific mutation rate chosen as a function of the dimension and iteration index.
     crossover: bool
         whether to add a genetic crossover step every other iteration.
+    use_pareto: bool
+        whether to restart from a random pareto element in multiobjective mode, instead of the last one added.

     Notes
     -----
@@ -310,6 +317,7 @@ def __init__(
         noise_handling: tp.Optional[tp.Union[str, tp.Tuple[str, float]]] = None,
         mutation: str = "gaussian",
         crossover: bool = False,
+        use_pareto: bool = True,
     ) -> None:
         super().__init__(_OnePlusOne, locals())

From e164c27a3ba76883b360cb24843563d0424ac51a Mon Sep 17 00:00:00 2001
From: Jeremy Rapin
Date: Mon, 22 Feb 2021 12:57:50 +0100
Subject: [PATCH 02/17] fix

---
 nevergrad/optimization/optimizerlib.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nevergrad/optimization/optimizerlib.py b/nevergrad/optimization/optimizerlib.py
index a8fc78a2e..3e891d526 100644
--- a/nevergrad/optimization/optimizerlib.py
+++ b/nevergrad/optimization/optimizerlib.py
@@ -157,7 +157,7 @@ def _internal_ask_candidate(self) -> p.Parameter:
             pessimistic = self.current_bests["pessimistic"].parameter.spawn_child()
             if self.num_objectives > 1 and self.use_pareto:  # multiobjective
                 # revert to using a sample of the pareto front (not "pessimistic" though)
-                pessimistic = self._rng.choice(self.pareto_front())
+                pessimistic = self._rng.choice(self.pareto_front()).spawn_child()
             ref = self.parametrization
             if self.crossover and self._num_ask % 2 == 1 and len(self.archive) > 2:
                 data = mutator.crossover(
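To see the new flag end to end, here is a minimal sketch (not part of the patch) of a multiobjective (1+1) run; `ParametrizedOnePlusOne` and the ask/tell interface are the APIs touched above, while the two quadratic losses are invented for illustration:

```python
import nevergrad as ng
from nevergrad.optimization.optimizerlib import ParametrizedOnePlusOne

# configured (1+1) which restarts from random pareto-front elements
opt = ParametrizedOnePlusOne(use_pareto=True)(parametrization=2, budget=100)
for _ in range(100):
    cand = opt.ask()
    x, y = cand.value
    # telling a list of losses switches the optimizer to multiobjective mode
    opt.tell(cand, [x ** 2 + y ** 2, (x - 1) ** 2 + (y - 1) ** 2])
front = opt.pareto_front()
```

With `use_pareto=True`, mutations restart from a random element of the current pareto front rather than always from the pessimistic best, which should spread candidates along the front instead of clustering around a single solution.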
From f7e0e73e3593ec8b0c3b37a8a25849ba8f6da67f Mon Sep 17 00:00:00 2001
From: Jeremy Rapin
Date: Mon, 22 Feb 2021 14:21:50 +0100
Subject: [PATCH 03/17] fix

---
 nevergrad/optimization/base.py              | 11 +++++++----
 nevergrad/optimization/test_optimizerlib.py | 10 +++++++++-
 2 files changed, 16 insertions(+), 5 deletions(-)

diff --git a/nevergrad/optimization/base.py b/nevergrad/optimization/base.py
index f56fb95a4..5bc07ba57 100644
--- a/nevergrad/optimization/base.py
+++ b/nevergrad/optimization/base.py
@@ -213,11 +213,14 @@ def pareto_front(
         ----
         During non-multiobjective optimization, this returns the current pessimistic best
         """
-        if self._hypervolume_pareto is None:
-            return [self.provide_recommendation()]
-        return self._hypervolume_pareto.pareto_front(
-            size=size, subset=subset, subset_tentatives=subset_tentatives
+        pareto = (
+            []
+            if self._hypervolume_pareto is None
+            else self._hypervolume_pareto.pareto_front(
+                size=size, subset=subset, subset_tentatives=subset_tentatives
+            )
         )
+        return pareto if pareto else [self.provide_recommendation()]

     def dump(self, filepath: tp.Union[str, Path]) -> None:
         """Pickles the optimizer into a file."""
diff --git a/nevergrad/optimization/test_optimizerlib.py b/nevergrad/optimization/test_optimizerlib.py
index 9f6e15fa2..f120638d5 100644
--- a/nevergrad/optimization/test_optimizerlib.py
+++ b/nevergrad/optimization/test_optimizerlib.py
@@ -715,7 +715,7 @@ def _multiobjective(z: np.ndarray) -> tp.Tuple[float, float, float]:
     return (abs(x - 1), abs(y + 1), abs(x - y))


-@pytest.mark.parametrize("name", ["DE", "ES"])  # type: ignore
+@pytest.mark.parametrize("name", ["DE", "ES", "OnePlusOne"])  # type: ignore
 @testing.suppress_nevergrad_warnings()  # hides bad loss
 def test_mo_constrained(name: str) -> None:
     optimizer = optlib.registry[name](2, budget=60)
@@ -733,6 +733,14 @@ def constraint(arg: tp.Any) -> bool:  # pylint: disable=unused-argument
     assert optimizer._rank_method is not None  # make sure the nsga2 ranker is used


+def test_mo_one_plus_one_with_nan() -> None:
+    optimizer = optlib.OnePlusOne(2, budget=30)
+    optimizer.tell(ng.p.MultiobjectiveReference(), [10, 10, 10])
+    for _ in range(3):
+        cand = optimizer.ask()
+        optimizer.tell(cand, [-38, 0, np.nan])
+
+
 def test_paraportfolio_de() -> None:
     workers = 40
     opt = optlib.ParaPortfolio(12, budget=100 * workers, num_workers=workers)

From 0ffc33577a999e0af34b7c9036a074a9adc527df Mon Sep 17 00:00:00 2001
From: Jeremy Rapin
Date: Mon, 22 Feb 2021 16:01:59 +0100
Subject: [PATCH 04/17] num

---
 nevergrad/benchmark/test_xpbase.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/nevergrad/benchmark/test_xpbase.py b/nevergrad/benchmark/test_xpbase.py
index d4c99ac52..da5de8835 100644
--- a/nevergrad/benchmark/test_xpbase.py
+++ b/nevergrad/benchmark/test_xpbase.py
@@ -48,9 +48,7 @@ def test_run_packed_artificial_function() -> None:
     )
     xp = xpbase.Experiment(func, optimizer="OnePlusOne", budget=24, num_workers=2, batch_mode=True, seed=14)
     summary = xp.run()
-    np.testing.assert_almost_equal(
-        summary["loss"], -9784.829729792353, decimal=1
-    )  # makes sure seeding works!
+    np.testing.assert_almost_equal(summary["loss"], -9726.2, decimal=1)  # makes sure seeding works!


 def test_noisy_artificial_function_loss() -> None:
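The point of the reordering in `pareto_front` above: the front can legitimately be empty (e.g. when only NaN or reference-dominated losses were told), and callers such as the new (1+1) pareto sampling need a non-empty list. A hedged illustration of the guaranteed fallback, mirroring the new test:

```python
import numpy as np
import nevergrad as ng
from nevergrad.optimization import optimizerlib as optlib

opt = optlib.OnePlusOne(2, budget=30)
opt.tell(ng.p.MultiobjectiveReference(), [10, 10, 10])
for _ in range(3):
    opt.tell(opt.ask(), [-38, 0, np.nan])  # invalid losses never enter the front
front = opt.pareto_front()
assert len(front) == 1  # falls back to [provide_recommendation()], never []
```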
From 1651d7039c783aaafcc5ed4a22b082b96ab31abf Mon Sep 17 00:00:00 2001
From: Jeremy Rapin
Date: Mon, 22 Feb 2021 17:55:15 +0100
Subject: [PATCH 05/17] fixes

---
 .../optimization/differentialevolution.py   | 31 +++++++++++--------
 nevergrad/optimization/optimizerlib.py      |  5 ++-
 nevergrad/optimization/test_optimizerlib.py |  9 ++++--
 3 files changed, 28 insertions(+), 17 deletions(-)

diff --git a/nevergrad/optimization/differentialevolution.py b/nevergrad/optimization/differentialevolution.py
index 4baf975d7..2a1a1e9e1 100644
--- a/nevergrad/optimization/differentialevolution.py
+++ b/nevergrad/optimization/differentialevolution.py
@@ -117,18 +117,21 @@ def recommend(self) -> p.Parameter:  # This is NOT the naive version. We deal wi
     def _internal_ask_candidate(self) -> p.Parameter:
         if len(self.population) < self.llambda:  # initialization phase
             init = self._config.initialization
-            if self.sampler is None and init != "gaussian":
+            if self.sampler is None and init not in ["gaussian", "parametrization"]:
                 assert init in ["LHS", "QR"]
                 sampler_cls = sequences.LHSSampler if init == "LHS" else sequences.HammersleySampler
                 self.sampler = sampler_cls(
                     self.dimension, budget=self.llambda, scrambling=init == "QR", random_state=self._rng
                 )
-            new_guy = self.scale * (
-                self._rng.normal(0, 1, self.dimension)
-                if self.sampler is None
-                else stats.norm.ppf(self.sampler())
-            )
-            candidate = self.parametrization.spawn_child().set_standardized_data(new_guy)
+            if self.sampler == "parametrization":
+                candidate = self.parametrization.sample()
+            else:
+                new_guy = self.scale * (
+                    self._rng.normal(0, 1, self.dimension)
+                    if self.sampler is None
+                    else stats.norm.ppf(self.sampler())
+                )
+                candidate = self.parametrization.spawn_child().set_standardized_data(new_guy)
             candidate.heritage["lineage"] = candidate.uid  # new lineage
             self.population[candidate.uid] = candidate
             self._uid_queue.asked.add(candidate.uid)
@@ -146,10 +149,11 @@ def _internal_ask_candidate(self) -> p.Parameter:
         # redefine the different parents in case of multiobjective optimization
         if self._config.multiobjective_adaptation and self.num_objectives > 1:
             pareto = self.pareto_front()
+            # can't use choice directly on pareto, because parametrization can be iterable
            if pareto:
-                best = parent if parent in pareto else self._rng.choice(pareto)
+                best = parent if parent in pareto else pareto[self._rng.choice(len(pareto))]
                 if len(pareto) > 2:  # otherwise, not enough diversity
-                    a, b = self._rng.choice(pareto, size=2, replace=False)
+                    a, b = (pareto[idx] for idx in self._rng.choice(len(pareto), size=2, replace=False))
         # define donor
         data_a, data_b, data_best = (
             indiv.get_standardized_data(reference=self.parametrization) for indiv in (a, b, best)
@@ -228,8 +232,9 @@ class DifferentialEvolution(base.ConfiguredOptimizer):

     Parameters
     ----------
-    initialization: "LHS", "QR" or "gaussian"
-        algorithm/distribution used for the initialization phase
+    initialization: "parametrization", "LHS" or "QR"
+        algorithm/distribution used for the initialization phase. If "parametrization", this uses the
+        sample method of the parametrization.
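The indexing dance introduced above deserves a standalone note: `numpy.random.choice` coerces its first argument to an array, and nevergrad parameters can be iterable, so passing them directly can explode them into their elements. A small sketch of the safe pattern (the strings stand in for `p.Parameter` objects):

```python
import numpy as np

rng = np.random.RandomState(0)
pareto = ["cand_a", "cand_b", "cand_c"]  # stand-ins for p.Parameter objects
# sample indices, then index the plain Python list:
best = pareto[rng.choice(len(pareto))]
a, b = (pareto[idx] for idx in rng.choice(len(pareto), size=2, replace=False))
```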
     scale: float or str
         scale of random component of the updates
     recommendation: "pessimistic", "optimistic", "mean" or "noisy"
@@ -256,7 +261,7 @@ class DifferentialEvolution(base.ConfiguredOptimizer):
     def __init__(
         self,
         *,
-        initialization: str = "gaussian",
+        initialization: str = "parametrization",
         scale: tp.Union[str, float] = 1.0,
         recommendation: str = "optimistic",
         crossover: tp.Union[str, float] = 0.5,
@@ -268,7 +273,7 @@ def __init__(
     ) -> None:
         super().__init__(_DE, locals(), as_config=True)
         assert recommendation in ["optimistic", "pessimistic", "noisy", "mean"]
-        assert initialization in ["gaussian", "LHS", "QR"]
+        assert initialization in ["gaussian", "LHS", "QR", "parametrization"]
         assert isinstance(scale, float) or scale == "mini"
         if not isinstance(popsize, int):
             assert popsize in ["large", "dimension", "standard"]
diff --git a/nevergrad/optimization/optimizerlib.py b/nevergrad/optimization/optimizerlib.py
index 3e891d526..e85c95b7d 100644
--- a/nevergrad/optimization/optimizerlib.py
+++ b/nevergrad/optimization/optimizerlib.py
@@ -157,7 +157,10 @@ def _internal_ask_candidate(self) -> p.Parameter:
             pessimistic = self.current_bests["pessimistic"].parameter.spawn_child()
             if self.num_objectives > 1 and self.use_pareto:  # multiobjective
                 # revert to using a sample of the pareto front (not "pessimistic" though)
-                pessimistic = self._rng.choice(self.pareto_front()).spawn_child()
+                pareto = (
+                    self.pareto_front()
+                )  # we can't use choice directly, because numpy does not like iterables
+                pessimistic = pareto[self._rng.choice(len(pareto))].spawn_child()
             ref = self.parametrization
             if self.crossover and self._num_ask % 2 == 1 and len(self.archive) > 2:
                 data = mutator.crossover(
diff --git a/nevergrad/optimization/test_optimizerlib.py b/nevergrad/optimization/test_optimizerlib.py
index f120638d5..b26ddb277 100644
--- a/nevergrad/optimization/test_optimizerlib.py
+++ b/nevergrad/optimization/test_optimizerlib.py
@@ -733,10 +733,13 @@ def constraint(arg: tp.Any) -> bool:  # pylint: disable=unused-argument
     assert optimizer._rank_method is not None  # make sure the nsga2 ranker is used


-def test_mo_one_plus_one_with_nan() -> None:
-    optimizer = optlib.OnePlusOne(2, budget=30)
+@pytest.mark.parametrize("name", ["DE", "ES", "OnePlusOne"])  # type: ignore
+@testing.suppress_nevergrad_warnings()  # hides bad loss
+def test_mo_with_nan(name: str) -> None:
+    param = ng.p.Instrumentation(x=ng.p.Scalar(lower=0, upper=5), y=ng.p.Scalar(lower=0, upper=3))
+    optimizer = optlib.registry[name](param, budget=60)
     optimizer.tell(ng.p.MultiobjectiveReference(), [10, 10, 10])
-    for _ in range(3):
+    for _ in range(50):
         cand = optimizer.ask()
         optimizer.tell(cand, [-38, 0, np.nan])

From 361b8b47c765fb6cfded23d5757a543bdf35358b Mon Sep 17 00:00:00 2001
From: Jeremy Rapin
Date: Mon, 22 Feb 2021 18:06:24 +0100
Subject: [PATCH 06/17] fix

---
 nevergrad/optimization/differentialevolution.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nevergrad/optimization/differentialevolution.py b/nevergrad/optimization/differentialevolution.py
index 2a1a1e9e1..c79f2918b 100644
--- a/nevergrad/optimization/differentialevolution.py
+++ b/nevergrad/optimization/differentialevolution.py
@@ -123,7 +123,7 @@ def _internal_ask_candidate(self) -> p.Parameter:
                 self.sampler = sampler_cls(
                     self.dimension, budget=self.llambda, scrambling=init == "QR", random_state=self._rng
                 )
-            if self.sampler == "parametrization":
+            if init == "parametrization":
                 candidate = self.parametrization.sample()
             else:
                 new_guy = self.scale * (
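In practice, routing LHS/QR initialization through a one-shot optimizer (next patch) and `"parametrization"` through `sample()` makes the initial population respect parameter bounds. A sketch mirroring the expectation of the `test_de_sampling` test added below:

```python
import nevergrad as ng
from nevergrad.optimization import optimizerlib as optlib

param = ng.p.Scalar(lower=-100, upper=100).set_mutation(sigma=1)
opt = optlib.LhsDE(param, budget=600, num_workers=100)
above_50 = sum(abs(opt.ask().value) > 50 for _ in range(100))
# with the old N(0, 1) standardized-space init, values beyond 50 were
# essentially never asked; bound-aware sampling covers the whole range
```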
From 8f6886b0b3a299cbb664f71f33c6676b9254f1e9 Mon Sep 17 00:00:00 2001
From: Jeremy Rapin
Date: Mon, 22 Feb 2021 18:50:37 +0100
Subject: [PATCH 07/17] fix

---
 .../optimization/differentialevolution.py   | 20 ++++++++-----------
 nevergrad/optimization/test_optimizerlib.py |  9 +++++++++
 2 files changed, 17 insertions(+), 12 deletions(-)

diff --git a/nevergrad/optimization/differentialevolution.py b/nevergrad/optimization/differentialevolution.py
index c79f2918b..a96913c04 100644
--- a/nevergrad/optimization/differentialevolution.py
+++ b/nevergrad/optimization/differentialevolution.py
@@ -5,11 +5,10 @@
 import warnings
 import numpy as np
-from scipy import stats
 import nevergrad.common.typing as tp
 from nevergrad.parametrization import parameter as p
 from . import base
-from . import sequences
+from . import oneshot


 class Crossover:
@@ -100,7 +99,7 @@ def __init__(
         self._penalize_cheap_violations = True
         self._uid_queue = base.utils.UidQueue()
         self.population: tp.Dict[str, p.Parameter] = {}
-        self.sampler: tp.Optional[sequences.Sampler] = None
+        self.sampler: tp.Optional[base.Optimizer] = None

     def recommend(self) -> p.Parameter:  # This is NOT the naive version. We deal with noise.
         if self._config.recommendation != "noisy":
@@ -119,18 +118,15 @@ def _internal_ask_candidate(self) -> p.Parameter:
             init = self._config.initialization
             if self.sampler is None and init not in ["gaussian", "parametrization"]:
                 assert init in ["LHS", "QR"]
-                sampler_cls = sequences.LHSSampler if init == "LHS" else sequences.HammersleySampler
-                self.sampler = sampler_cls(
-                    self.dimension, budget=self.llambda, scrambling=init == "QR", random_state=self._rng
-                )
+                self.sampler = oneshot.SamplingSearch(
+                    sampler=init if init == "LHS" else "Hammersley", scrambled=init == "QR"
+                )(self.parametrization, budget=self.llambda)
             if init == "parametrization":
                 candidate = self.parametrization.sample()
+            elif self.sampler is not None:
+                candidate = self.sampler.ask()
             else:
-                new_guy = self.scale * (
-                    self._rng.normal(0, 1, self.dimension)
-                    if self.sampler is None
-                    else stats.norm.ppf(self.sampler())
-                )
+                new_guy = self.scale * self._rng.normal(0, 1, self.dimension)
                 candidate = self.parametrization.spawn_child().set_standardized_data(new_guy)
             candidate.heritage["lineage"] = candidate.uid  # new lineage
             self.population[candidate.uid] = candidate
diff --git a/nevergrad/optimization/test_optimizerlib.py b/nevergrad/optimization/test_optimizerlib.py
index b26ddb277..647837bd0 100644
--- a/nevergrad/optimization/test_optimizerlib.py
+++ b/nevergrad/optimization/test_optimizerlib.py
@@ -744,6 +744,15 @@ def test_mo_with_nan(name: str) -> None:
         optimizer.tell(cand, [-38, 0, np.nan])


+def test_de_sampling() -> None:
+    param = ng.p.Scalar(lower=-100, upper=100).set_mutation(sigma=1)
+    opt = optlib.LhsDE(param, budget=600, num_workers=100)
+    above_50 = 0
+    for _ in range(100):
+        above_50 += abs(opt.ask().value) > 50
+    assert above_50 > 20  # should be around 50
+
+
 def test_paraportfolio_de() -> None:
     workers = 40
     opt = optlib.ParaPortfolio(12, budget=100 * workers, num_workers=workers)

From 8544074d1d0628d7ad5da307cdaea24da278f436 Mon Sep 17 00:00:00 2001
From: Jeremy Rapin
Date: Mon, 22 Feb 2021 19:25:17 +0100
Subject: [PATCH 08/17] skip

---
 nevergrad/benchmark/experiments.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nevergrad/benchmark/experiments.py b/nevergrad/benchmark/experiments.py
index ec2012316..9ce60e62d 100644
--- a/nevergrad/benchmark/experiments.py
+++ b/nevergrad/benchmark/experiments.py
@@ -1272,6 +1272,7 @@ def image_similarity(
     """Optimizing images: artificial criterion for now."""
     seedg = create_seed_generator(seed)
     optims = get_optimizers("structured_moo", seed=next(seedg))
+    skip_ci(reason="too slow")
     funcs: tp.List[ExperimentFunction] = [
         imagesxp.Image(loss=loss, with_pgan=with_pgan)
         for loss in imagesxp.imagelosses.registry.values()
@@ -1281,7 +1282,6 @@ def image_similarity(
     for func in funcs:
         for algo in optims:
             xp = Experiment(func, algo, budget, num_workers=1, seed=next(seedg))
-            skip_ci(reason="too slow")
             if not xp.is_incoherent:
                 yield xp

From e8a13c63d045d4cc0c738f8ea437c0e3ed766080 Mon Sep 17 00:00:00 2001
From: Jeremy Rapin
Date: Mon, 22 Feb 2021 19:30:02 +0100
Subject: [PATCH 09/17] skip

---
 nevergrad/benchmark/experiments.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/nevergrad/benchmark/experiments.py b/nevergrad/benchmark/experiments.py
index 9ce60e62d..43b430f2c 100644
--- a/nevergrad/benchmark/experiments.py
+++ b/nevergrad/benchmark/experiments.py
@@ -1272,7 +1272,6 @@ def image_similarity(
     """Optimizing images: artificial criterion for now."""
     seedg = create_seed_generator(seed)
     optims = get_optimizers("structured_moo", seed=next(seedg))
-    skip_ci(reason="too slow")
     funcs: tp.List[ExperimentFunction] = [
         imagesxp.Image(loss=loss, with_pgan=with_pgan)
         for loss in imagesxp.imagelosses.registry.values()
@@ -1281,6 +1281,7 @@ def image_similarity(
     for func in funcs:
         for algo in optims:
             xp = Experiment(func, algo, budget, num_workers=1, seed=next(seedg))
+            skip_ci(reason="too slow")
             if not xp.is_incoherent:
                 yield xp

@@ -1491,6 +1491,7 @@ def images_using_gan(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]:
     """Optimizing an image using koncept512 and a GAN"""
     seedg = create_seed_generator(seed)
     optims = get_optimizers("structured_moo", seed=next(seedg))
+    skip_ci(reason="too slow")
     func = imagesxp.ImageFromPGAN()
     num_workers = 1
     for budget in [100 * 5 ** k for k in range(3)]:

From e4d217365d07bb06676deb8ac07e8fb51cddc3bc Mon Sep 17 00:00:00 2001
From: Jeremy Rapin
Date: Tue, 23 Feb 2021 11:55:48 +0100
Subject: [PATCH 10/17] skip

---
 nevergrad/functions/images/core.py              | 8 ++++++--
 nevergrad/optimization/differentialevolution.py | 7 +++++--
 nevergrad/optimization/oneshot.py               | 8 +++-----
 3 files changed, 14 insertions(+), 9 deletions(-)

diff --git a/nevergrad/functions/images/core.py b/nevergrad/functions/images/core.py
index 4c4a4430f..0ede7b5cd 100644
--- a/nevergrad/functions/images/core.py
+++ b/nevergrad/functions/images/core.py
@@ -3,10 +3,11 @@
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.

-import cv2
-from pathlib import Path
+import os
 import itertools
+from pathlib import Path

+import cv2
 import numpy as np
 import PIL.Image
 import torch.nn as nn
@@ -17,6 +18,7 @@

 import nevergrad as ng
 import nevergrad.common.typing as tp
+from nevergrad.common import errors
 from .. import base
 from . import imagelosses
@@ -259,6 +261,8 @@ def __init__(
         if not torch.cuda.is_available():
             use_gpu = False
         # Storing high level information..
+        if os.environ("CIRCLECI", False):
+            raise errors.UnsupportedExperiment("ImageFromPGAN is not well supported in CircleCI")
         self.pgan_model = torch.hub.load(
             "facebookresearch/pytorch_GAN_zoo:hub",
             "PGAN",
diff --git a/nevergrad/optimization/differentialevolution.py b/nevergrad/optimization/differentialevolution.py
index a96913c04..abdfc2798 100644
--- a/nevergrad/optimization/differentialevolution.py
+++ b/nevergrad/optimization/differentialevolution.py
@@ -119,8 +119,11 @@ def _internal_ask_candidate(self) -> p.Parameter:
             if self.sampler is None and init not in ["gaussian", "parametrization"]:
                 assert init in ["LHS", "QR"]
                 self.sampler = oneshot.SamplingSearch(
-                    sampler=init if init == "LHS" else "Hammersley", scrambled=init == "QR"
-                )(self.parametrization, budget=self.llambda)
+                    sampler=init if init == "LHS" else "Hammersley", scrambled=init == "QR", scale=self.scale
+                )(
+                    self.parametrization,
+                    budget=self.llambda,
+                )
             if init == "parametrization":
                 candidate = self.parametrization.sample()
             elif self.sampler is not None:
diff --git a/nevergrad/optimization/oneshot.py b/nevergrad/optimization/oneshot.py
index eb4892ebc..8696386b3 100644
--- a/nevergrad/optimization/oneshot.py
+++ b/nevergrad/optimization/oneshot.py
@@ -75,14 +75,12 @@ def avg_of_k_best(archive: utils.Archive[utils.MultiValue], method: str = "dimfo
         raise ValueError(f"{method} not implemented as a method for choosing k in avg_of_k_best.")
     k = 1 if k < 1 else int(k)  # Wasted time.
-    first_k_individuals = [
-        k for k in sorted(items, key=lambda indiv: archive[indiv[0]].get_estimation("pessimistic"))[:k]
-    ]
+    first_k_individuals = sorted(items, key=lambda indiv: archive[indiv[0]].get_estimation("pessimistic"))[:k]
     assert len(first_k_individuals) == k
     return np.array(sum(p[0] for p in first_k_individuals) / k)


-# # # # # classes of optimizers # # # # # 
+# # # # # classes of optimizers # # # # #


 class OneShotOptimizer(base.Optimizer):
@@ -97,7 +97,7 @@ class OneShotOptimizer(base.Optimizer):
 # - Some variants use a rescaling depending on the budget and the dimension.


-# # # # # One-shot optimizers: all fitness evaluations are in parallel. # # # # # 
+# # # # # One-shot optimizers: all fitness evaluations are in parallel. # # # # #


 # pylint: disable=too-many-arguments,too-many-instance-attributes
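The `avg_of_k_best` change in the last hunk is behavior-preserving and worth spelling out: the slice in the old comprehension was evaluated in the enclosing scope, so `[k for k in sorted(...)[:k]]` merely copied the list while rebinding the name `k`. A toy check of the simplified form (data invented):

```python
items = [(0.7, "a"), (0.1, "b"), (0.4, "c"), (0.9, "d")]  # toy archive items
k = 2
first_k = sorted(items, key=lambda indiv: indiv[0])[:k]  # no copy, no shadowing
assert len(first_k) == k and first_k[0] == (0.1, "b")
```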
From 85ba54119c231f904b2277252aea313e310c51b9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9r=C3=A9my=20Rapin?=
Date: Tue, 23 Feb 2021 11:56:47 +0100
Subject: [PATCH 11/17] Update nevergrad/benchmark/experiments.py

---
 nevergrad/benchmark/experiments.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/nevergrad/benchmark/experiments.py b/nevergrad/benchmark/experiments.py
index 43b430f2c..ec2012316 100644
--- a/nevergrad/benchmark/experiments.py
+++ b/nevergrad/benchmark/experiments.py
@@ -1491,7 +1491,6 @@ def images_using_gan(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]:
     """Optimizing an image using koncept512 and a GAN"""
     seedg = create_seed_generator(seed)
     optims = get_optimizers("structured_moo", seed=next(seedg))
-    skip_ci(reason="too slow")
     func = imagesxp.ImageFromPGAN()
     num_workers = 1
     for budget in [100 * 5 ** k for k in range(3)]:

From bb8fe038f40f6f9fae998021f7d1169410da88a1 Mon Sep 17 00:00:00 2001
From: Jeremy Rapin
Date: Tue, 23 Feb 2021 12:02:28 +0100
Subject: [PATCH 12/17] fix

---
 nevergrad/functions/images/core.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nevergrad/functions/images/core.py b/nevergrad/functions/images/core.py
index 0ede7b5cd..16d1b87b7 100644
--- a/nevergrad/functions/images/core.py
+++ b/nevergrad/functions/images/core.py
@@ -261,7 +261,7 @@ def __init__(
         if not torch.cuda.is_available():
             use_gpu = False
         # Storing high level information..
-        if os.environ("CIRCLECI", False):
+        if os.environ.get("CIRCLECI", False):
             raise errors.UnsupportedExperiment("ImageFromPGAN is not well supported in CircleCI")
         self.pgan_model = torch.hub.load(
             "facebookresearch/pytorch_GAN_zoo:hub",
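For the record, the one-character-class bug fixed above: `os.environ` is a mapping, not a callable, so `os.environ("CIRCLECI", False)` raises `TypeError` at runtime; the lookup needs `.get`, which returns the variable's string value or the default. Standalone sketch (the `RuntimeError` stands in for `errors.UnsupportedExperiment`):

```python
import os

if os.environ.get("CIRCLECI", False):  # truthy whenever CircleCI defines the variable
    raise RuntimeError("ImageFromPGAN is not well supported in CircleCI")
```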
From 6811a4cadc0e8781d7e081eb0511672c874d1cb0 Mon Sep 17 00:00:00 2001
From: Jeremy Rapin
Date: Tue, 23 Feb 2021 12:14:47 +0100
Subject: [PATCH 13/17] fix

---
 nevergrad/optimization/experimentalvariants.py | 2 +-
 nevergrad/optimization/optimizerlib.py         | 4 ++--
 nevergrad/optimization/test_callbacks.py       | 4 ++--
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/nevergrad/optimization/experimentalvariants.py b/nevergrad/optimization/experimentalvariants.py
index 72a31d75f..14f13009c 100644
--- a/nevergrad/optimization/experimentalvariants.py
+++ b/nevergrad/optimization/experimentalvariants.py
@@ -22,7 +22,7 @@
 ParametrizationDE = DifferentialEvolution(crossover="parametrization").set_name(
     "ParametrizationDE", register=True
 )
-MiniDE = DifferentialEvolution(scale="mini").set_name("MiniDE", register=True)
+MiniDE = DifferentialEvolution(initialization="gaussian", scale="mini").set_name("MiniDE", register=True)
 MiniLhsDE = DifferentialEvolution(initialization="LHS", scale="mini").set_name("MiniLhsDE", register=True)
 MiniQrDE = DifferentialEvolution(initialization="QR", scale="mini").set_name("MiniQrDE", register=True)
 AlmostRotationInvariantDEAndBigPop = DifferentialEvolution(crossover=0.9, popsize="dimension").set_name(
diff --git a/nevergrad/optimization/optimizerlib.py b/nevergrad/optimization/optimizerlib.py
index e85c95b7d..cec42244c 100644
--- a/nevergrad/optimization/optimizerlib.py
+++ b/nevergrad/optimization/optimizerlib.py
@@ -69,7 +69,7 @@ def __init__(
         noise_handling: tp.Optional[tp.Union[str, tp.Tuple[str, float]]] = None,
         mutation: str = "gaussian",
         crossover: bool = False,
-        use_pareto: bool = True,
+        use_pareto: bool = False,
     ) -> None:
         super().__init__(parametrization, budget=budget, num_workers=num_workers)
         self._sigma: float = 1
@@ -320,7 +320,7 @@ def __init__(
         noise_handling: tp.Optional[tp.Union[str, tp.Tuple[str, float]]] = None,
         mutation: str = "gaussian",
         crossover: bool = False,
-        use_pareto: bool = True,
+        use_pareto: bool = False,
     ) -> None:
         super().__init__(_OnePlusOne, locals())
diff --git a/nevergrad/optimization/test_callbacks.py b/nevergrad/optimization/test_callbacks.py
index 33f1b0ab6..178118795 100644
--- a/nevergrad/optimization/test_callbacks.py
+++ b/nevergrad/optimization/test_callbacks.py
@@ -33,9 +33,9 @@ def test_log_parameters(tmp_path: Path) -> None:
     logs = logger.load_flattened()
     assert len(logs) == 32
     assert isinstance(logs[-1]["1"], float)
-    assert len(logs[-1]) == 35
+    assert len(logs[-1]) == 36
     logs = logger.load_flattened(max_list_elements=2)
-    assert len(logs[-1]) == 27
+    assert len(logs[-1]) == 28
     # deletion
     logger = callbacks.ParametersLogger(filepath, append=False)
     assert not logger.load()

From a81b6ba139e5cb31a4064c92de771ebd28b6100c Mon Sep 17 00:00:00 2001
From: Jeremy Rapin
Date: Tue, 23 Feb 2021 12:34:18 +0100
Subject: [PATCH 14/17] fix

---
 nevergrad/benchmark/test_xpbase.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nevergrad/benchmark/test_xpbase.py b/nevergrad/benchmark/test_xpbase.py
index da5de8835..dbf35ed45 100644
--- a/nevergrad/benchmark/test_xpbase.py
+++ b/nevergrad/benchmark/test_xpbase.py
@@ -48,7 +48,7 @@ def test_run_packed_artificial_function() -> None:
     )
     xp = xpbase.Experiment(func, optimizer="OnePlusOne", budget=24, num_workers=2, batch_mode=True, seed=14)
     summary = xp.run()
-    np.testing.assert_almost_equal(summary["loss"], -9726.2, decimal=1)  # makes sure seeding works!
+    np.testing.assert_almost_equal(summary["loss"], -9784.8, decimal=1)  # makes sure seeding works!


 def test_noisy_artificial_function_loss() -> None:

From 449539cea4a1bc95f776d84d9f1177122daa662f Mon Sep 17 00:00:00 2001
From: Jeremy Rapin
Date: Tue, 23 Feb 2021 12:38:34 +0100
Subject: [PATCH 15/17] changelog

---
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index d95abc5b1..c12a89f37 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -21,6 +21,7 @@
 - `Parameter` classes have now a layer structure [#1045](https://github.com/facebookresearch/nevergrad/pull/1045)
   which simplifies changing their behavior. In future PRs this system will take charge of bounds,
   other constraints, sampling etc.
+- `DE` initial sampling has been updated to take bounds into account [#1058](https://github.com/facebookresearch/nevergrad/pull/1058)

 ### Other changes
From e9c477fcd2b604da2e19e3b1026ff8454b3dd563 Mon Sep 17 00:00:00 2001
From: Jeremy Rapin
Date: Tue, 23 Feb 2021 12:52:55 +0100
Subject: [PATCH 16/17] fix

---
 nevergrad/optimization/base.py                | 2 ++
 nevergrad/optimization/multiobjective/core.py | 6 +++---
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/nevergrad/optimization/base.py b/nevergrad/optimization/base.py
index 5bc07ba57..a1cec4aef 100644
--- a/nevergrad/optimization/base.py
+++ b/nevergrad/optimization/base.py
@@ -220,6 +220,8 @@ def pareto_front(
                 size=size, subset=subset, subset_tentatives=subset_tentatives
             )
         )
+        print(pareto)
+        print(type(pareto))
         return pareto if pareto else [self.provide_recommendation()]

     def dump(self, filepath: tp.Union[str, Path]) -> None:
diff --git a/nevergrad/optimization/multiobjective/core.py b/nevergrad/optimization/multiobjective/core.py
index 88ffc4d55..ba4ef4c27 100644
--- a/nevergrad/optimization/multiobjective/core.py
+++ b/nevergrad/optimization/multiobjective/core.py
@@ -166,8 +166,8 @@ def pareto_front(
         if size is None or size >= len(self._pareto):  # No limit: we return the full set.
             return self._pareto
         if subset == "random":
-            return self._rng.choice(self._pareto, size)  # type: ignore
-        tentatives = [self._rng.choice(self._pareto, size) for _ in range(subset_tentatives)]
+            return self._rng.choice(self._pareto, size).tolist()  # type: ignore
+        tentatives = [self._rng.choice(self._pareto, size).tolist() for _ in range(subset_tentatives)]
         if self._hypervolume is None:
             raise RuntimeError("Hypervolume not initialized, not supported")  # TODO fix
         hypervolume = self._hypervolume
@@ -192,4 +192,4 @@ def pareto_front(
                 raise ValueError(f'Unknown subset for Pareto-Set subsampling: "{subset}"')
             score += best_score ** 2 if subset != "EPS" else max(score, best_score)
         scores += [score]
-        return tentatives[scores.index(min(scores))]
+        return tentatives[scores.index(min(scores))]  # type: ignore

From 917eea876f0545e6b8f009d024a13f2fd73443de Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9r=C3=A9my=20Rapin?=
Date: Tue, 23 Feb 2021 13:06:41 +0100
Subject: [PATCH 17/17] Update nevergrad/optimization/base.py

---
 nevergrad/optimization/base.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/nevergrad/optimization/base.py b/nevergrad/optimization/base.py
index a1cec4aef..5bc07ba57 100644
--- a/nevergrad/optimization/base.py
+++ b/nevergrad/optimization/base.py
@@ -220,8 +220,6 @@ def pareto_front(
                 size=size, subset=subset, subset_tentatives=subset_tentatives
            )
         )
-        print(pareto)
-        print(type(pareto))
         return pareto if pareto else [self.provide_recommendation()]

     def dump(self, filepath: tp.Union[str, Path]) -> None:
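The `.tolist()` calls in patch 16 are the actual fix here (the two prints were debugging output, reverted in patch 17): `rng.choice` on a list returns a numpy array even for object inputs, while `pareto_front()` is expected to return a plain list of parameters. A sketch of the difference (strings stand in for `p.Parameter` objects):

```python
import numpy as np

rng = np.random.RandomState(0)
pareto = ["cand_a", "cand_b", "cand_c"]  # stand-ins for p.Parameter objects
as_array = rng.choice(pareto, 2)           # numpy.ndarray, surprises list-typed callers
as_list = rng.choice(pareto, 2).tolist()   # plain Python list, as the API promises
```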