diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5b1b33d441..2046d18035 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,10 @@
 
 ## main
 
+## v0.8.0 (2023-07-03)
+- Add the Dagstuhloid benchmark
+- Add yet another group of metamodels
+
 ## v0.7.0 (2023-06-16)
 - Fix links
 - Add metamodels
diff --git a/nevergrad/__init__.py b/nevergrad/__init__.py
index 1388709cd1..4e4711349a 100644
--- a/nevergrad/__init__.py
+++ b/nevergrad/__init__.py
@@ -15,4 +15,4 @@
 
 __all__ = ["optimizers", "families", "callbacks", "p", "typing", "errors", "ops"]
 
-__version__ = "0.7.0"
+__version__ = "0.8.0"
diff --git a/nevergrad/benchmark/experiments.py b/nevergrad/benchmark/experiments.py
index 890438a3a8..812264cabf 100644
--- a/nevergrad/benchmark/experiments.py
+++ b/nevergrad/benchmark/experiments.py
@@ -79,15 +79,45 @@ def __call__(self, data: np.ndarray) -> tp.Union[bool, float]:
         return value > 0 if self.as_bool else value
 
 
+@registry.register
 def keras_tuning(
-    seed: tp.Optional[int] = None, overfitter: bool = False, seq: bool = False
+    seed: tp.Optional[int] = None,
+    overfitter: bool = False,
+    seq: bool = False,
+    veryseq: bool = False,
 ) -> tp.Iterator[Experiment]:
     """Machine learning hyperparameter tuning experiment. Based on Keras models."""
     seedg = create_seed_generator(seed)
     # Continuous case,
 
     # First, a few functions with constraints.
-    optims: tp.List[str] = ["PSO", "OnePlusOne"] + get_optimizers("basics", seed=next(seedg))  # type: ignore
+    # optims: tp.List[str] = ["PSO", "OnePlusOne"] + get_optimizers("basics", seed=next(seedg))  # type: ignore
+    optims = ["OnePlusOne", "BO", "RandomSearch", "CMA", "DE", "TwoPointsDE", "HyperOpt", "PCABO", "Cobyla"]
+    optims = [
+        "OnePlusOne",
+        "RandomSearch",
+        "CMA",
+        "DE",
+        "TwoPointsDE",
+        "HyperOpt",
+        "Cobyla",
+        "MetaModel",
+        "MetaModelOnePlusOne",
+        "RFMetaModel",
+        "RFMetaModelOnePlusOne",
+    ]
+    optims = ["OnePlusOne", "RandomSearch", "Cobyla"]
+    optims = ["DE", "TwoPointsDE", "HyperOpt", "MetaModelOnePlusOne"]
+    optims = get_optimizers("oneshot", seed=next(seedg))  # type: ignore
+    optims = [
+        "MetaTuneRecentering",
+        "MetaRecentering",
+        "HullCenterHullAvgCauchyScrHammersleySearch",
+        "LHSSearch",
+        "LHSCauchySearch",
+    ]
+    optims = ["NGOpt", "NGOptRW", "QODE"]
+    optims = ["NGOpt"]
     datasets = ["kerasBoston", "diabetes", "auto-mpg", "red-wine", "white-wine"]
     for dimension in [None]:
         for dataset in datasets:
@@ -98,6 +128,8 @@ def keras_tuning(
             for num_workers in (
                 [1, budget // 4] if seq else [budget]
             ):  # Seq for sequential optimization experiments.
+                if veryseq and num_workers > 1:
+                    continue
                 for optim in optims:
                     xp = Experiment(
                         function, optim, num_workers=num_workers, budget=budget, seed=next(seedg)
@@ -107,17 +139,45 @@ def keras_tuning(
                     yield xp
 
 
+@registry.register
 def mltuning(
     seed: tp.Optional[int] = None,
     overfitter: bool = False,
     seq: bool = False,
+    veryseq: bool = False,
     nano: bool = False,
 ) -> tp.Iterator[Experiment]:
     """Machine learning hyperparameter tuning experiment.
Based on scikit models.""" seedg = create_seed_generator(seed) - optims: tp.List[str] = get_optimizers("basics", seed=next(seedg)) # type: ignore - if not seq: - optims = get_optimizers("oneshot", seed=next(seedg)) # type: ignore + # optims: tp.List[str] = get_optimizers("basics", seed=next(seedg)) # type: ignore + # if not seq: + # optims = get_optimizers("oneshot", seed=next(seedg)) # type: ignore + optims = ["OnePlusOne", "BO", "RandomSearch", "CMA", "DE", "TwoPointsDE", "PCABO", "HyperOpt", "Cobyla"] + optims = [ + "OnePlusOne", + "RandomSearch", + "CMA", + "DE", + "TwoPointsDE", + "HyperOpt", + "Cobyla", + "MetaModel", + "MetaModelOnePlusOne", + "RFMetaModel", + "RFMetaModelOnePlusOne", + ] + optims = ["OnePlusOne", "RandomSearch", "Cobyla"] + optims = ["DE", "TwoPointsDE", "HyperOpt", "MetaModelOnePlusOne"] + optims = get_optimizers("oneshot", seed=next(seedg)) # type: ignore + optims = [ + "MetaTuneRecentering", + "MetaRecentering", + "HullCenterHullAvgCauchyScrHammersleySearch", + "LHSSearch", + "LHSCauchySearch", + ] + optims = ["NGOpt", "NGOptRW", "QODE"] + optims = ["NGOpt"] for dimension in [None, 1, 2, 3]: if dimension is None: datasets = ["boston", "diabetes", "auto-mpg", "red-wine", "white-wine"] @@ -132,6 +192,8 @@ def mltuning( # Seq for sequential optimization experiments. parallelization = [1, budget // 4] if seq else [budget] for num_workers in parallelization: + if veryseq and num_workers > 1: + continue for optim in optims: xp = Experiment( @@ -142,25 +204,36 @@ def mltuning( yield xp +@registry.register def naivemltuning(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: """Counterpart of mltuning with overfitting of valid loss, i.e. train/valid/valid instead of train/valid/test.""" return mltuning(seed, overfitter=True) -# We register only the sequential counterparts for the moment. +@registry.register +def veryseq_keras_tuning(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: + """Iterative counterpart of keras tuning.""" + return keras_tuning(seed, overfitter=False, seq=True, veryseq=True) + + @registry.register def seq_keras_tuning(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: """Iterative counterpart of keras tuning.""" return keras_tuning(seed, overfitter=False, seq=True) -# We register only the sequential counterparts for the moment. 
@registry.register def naive_seq_keras_tuning(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: """Naive counterpart (no overfitting, see naivemltuning)of seq_keras_tuning.""" return keras_tuning(seed, overfitter=True, seq=True) +@registry.register +def naive_veryseq_keras_tuning(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: + """Naive counterpart (no overfitting, see naivemltuning)of seq_keras_tuning.""" + return keras_tuning(seed, overfitter=True, seq=True, veryseq=True) + + @registry.register def oneshot_mltuning(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: """One-shot counterpart of Scikit tuning.""" @@ -180,6 +253,19 @@ def nano_seq_mltuning(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: return mltuning(seed, overfitter=False, seq=True, nano=True) +@registry.register +def nano_veryseq_mltuning(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: + """Iterative counterpart of seq_mltuning with smaller budget.""" + return mltuning(seed, overfitter=False, seq=True, nano=True, veryseq=True) + + +@registry.register +def nano_naive_veryseq_mltuning(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: + """Iterative counterpart of mltuning with overfitting of valid loss, i.e. train/valid/valid instead of train/valid/test, + and with lower budget.""" + return mltuning(seed, overfitter=True, seq=True, nano=True, veryseq=True) + + @registry.register def nano_naive_seq_mltuning(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: """Iterative counterpart of mltuning with overfitting of valid loss, i.e. train/valid/valid instead of train/valid/test, @@ -243,10 +329,9 @@ def yawidebbob(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: assert len(functions) == 21, f"{len(functions)} problems instead of 21. Yawidebbob should be standard." # This problem is intended as a stable basis forever. # The list of optimizers should contain only the basic for comparison and "baselines". - optims: tp.List[str] = ["NGOpt10"] + get_optimizers("baselines", seed=next(seedg)) # type: ignore - optims = ["NGOptRW", "NGOpt", "CMandAS2", "Shiwa", "CMA", "DE", "DiscreteLenglerOnePlusOne"] - np.random.shuffle(optims) - optims = optims[:2] + # optims: tp.List[str] = ["NGOpt10"] + get_optimizers("baselines", seed=next(seedg)) # type: ignore + optims = ["NGOptRW", "NGOpt", "RandomSearch", "CMA", "DE", "DiscreteLenglerOnePlusOne"] + # optims = optims[:2] index = 0 for function in functions: for budget in [50, 1500, 25000]: @@ -330,7 +415,8 @@ def yawidebbob(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: def parallel_small_budget(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: """Parallel optimization with small budgets""" seedg = create_seed_generator(seed) - optims: tp.List[str] = get_optimizers("basics", seed=next(seedg)) # type: ignore + # optims: tp.List[str] = get_optimizers("basics", seed=next(seedg)) # type: ignore + optims = ["DE", "TwoPointsDE", "CMA", "NGOpt", "PSO", "OnePlusOne", "RandomSearch"] names = ["hm", "rastrigin", "griewank", "rosenbrock", "ackley", "multipeak"] names += ["sphere", "cigar", "ellipsoid", "altellipsoid"] names += ["deceptiveillcond", "deceptivemultimodal", "deceptivepath"] @@ -367,7 +453,14 @@ def instrum_discrete(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: # Discrete, unordered. 
seedg = create_seed_generator(seed) - optims = get_optimizers("small_discrete", seed=next(seedg)) + # optims = get_optimizers("small_discrete", seed=next(seedg)) + optims = ["DiscreteOnePlusOne", "NGOpt", "CMA", "TwoPointsDE", "DiscreteLenglerOnePlusOne"] + optims = ["RFMetaModelOnePlusOne"] + optims = ["FastGADiscreteOnePlusOne"] + optims = ["DoubleFastGADiscreteOnePlusOne"] + optims = ["DiscreteOnePlusOne"] + optims = ["OnePlusOne"] + optims = ["DiscreteLenglerOnePlusOne"] for nv in [10, 50, 200, 1000, 5000]: for arity in [2, 3, 7, 30]: for instrum_str in ["Unordered", "Softmax", "Ordered"]: @@ -383,6 +476,8 @@ def instrum_discrete(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: corefuncs.DiscreteFunction(name, arity), instrum.set_name(instrum_str) ) dfunc.add_descriptors(arity=arity) + dfunc.add_descriptors(nv=nv) + dfunc.add_descriptors(instrum_str=instrum_str) for optim in optims: for nw in [1, 10]: for budget in [50, 500, 5000]: @@ -397,7 +492,10 @@ def sequential_instrum_discrete(seed: tp.Optional[int] = None) -> tp.Iterator[Ex seedg = create_seed_generator(seed) # Discrete, unordered. - optims = get_optimizers("discrete", seed=next(seedg)) + # optims = get_optimizers("discrete", seed=next(seedg)) + optims = ["DiscreteOnePlusOne", "NGOpt", "CMA", "TwoPointsDE", "DiscreteLenglerOnePlusOne"] + optims = ["OnePlusOne"] + optims = ["DiscreteLenglerOnePlusOne"] for nv in [10, 50, 200, 1000, 5000]: for arity in [2, 3, 7, 30]: for instrum_str in ["Unordered", "Softmax", "Ordered"]: @@ -412,6 +510,8 @@ def sequential_instrum_discrete(seed: tp.Optional[int] = None) -> tp.Iterator[Ex corefuncs.DiscreteFunction(name, arity), instrum.set_name(instrum_str) ) dfunc.add_descriptors(arity=arity) + dfunc.add_descriptors(nv=nv) + dfunc.add_descriptors(instrum_str=instrum_str) for optim in optims: for budget in [50, 500, 5000, 50000]: yield Experiment(dfunc, optim, budget=budget, seed=next(seedg)) @@ -425,6 +525,23 @@ def deceptive(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: seedg = create_seed_generator(seed) names = ["deceptivemultimodal", "deceptiveillcond", "deceptivepath"] optims = get_optimizers("basics", seed=next(seedg)) + optims = ["CMA", "DE", "TwoPointsDE", "PSO", "OnePlusOne", "RandomSearch", "NGOptRW"] + optims = [ + "BFGS", + "LBFGSB", + "DE", + "TwoPointsDE", + "RandomSearch", + "OnePlusOne", + "PSO", + "CMA", + "ChainMetaModelSQP", + "MemeticDE", + "MetaModel", + "RFMetaModel", + "MetaModelDE", + "RFMetaModelDE", + ] functions = [ ArtificialFunction( name, block_dimension=2, num_blocks=n_blocks, rotation=rotation, aggregator=aggregator @@ -436,7 +553,21 @@ def deceptive(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: ] for func in functions: for optim in optims: - for budget in [25, 37, 50, 75, 87] + list(range(100, 20001, 500)): + for budget in [ + 25, + 37, + 50, + 75, + 87, + 100, + 200, + 400, + 800, + 1600, + 3200, + 6400, + 12800, + ]: # + list(range(100, 20001, 500)): yield Experiment(func, optim, budget=budget, num_workers=1, seed=next(seedg)) @@ -578,6 +709,22 @@ def multimodal(seed: tp.Optional[int] = None, para: bool = False) -> tp.Iterator optims = get_optimizers("basics", seed=next(seedg)) if not para: optims += get_optimizers("scipy", seed=next(seedg)) + optims = [ + "BFGS", + "LBFGSB", + "DE", + "TwoPointsDE", + "RandomSearch", + "OnePlusOne", + "PSO", + "CMA", + "ChainMetaModelSQP", + "MemeticDE", + "MetaModel", + "RFMetaModel", + "MetaModelDE", + "RFMetaModelDE", + ] # + list(sorted(x for x, y in ng.optimizers.registry.items() if 
"Chain" in x or "BO" in x)) functions = [ ArtificialFunction(name, block_dimension=bd, useless_variables=bd * uv_factor) @@ -589,7 +736,9 @@ def multimodal(seed: tp.Optional[int] = None, para: bool = False) -> tp.Iterator for optim in optims: for budget in [3000, 10000, 30000, 100000]: for nw in [1000] if para else [1]: - yield Experiment(func, optim, budget=budget, num_workers=nw, seed=next(seedg)) + xp = Experiment(func, optim, budget=budget, num_workers=nw, seed=next(seedg)) + if not xp.is_incoherent: + yield xp @registry.register @@ -601,6 +750,22 @@ def hdmultimodal(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: # Keep in mind that Rosenbrock is multimodal in high dimension http://ieeexplore.ieee.org/document/6792472/. optims = get_optimizers("basics", "multimodal", seed=next(seedg)) + optims = [ + "BFGS", + "LBFGSB", + "DE", + "TwoPointsDE", + "RandomSearch", + "OnePlusOne", + "PSO", + "CMA", + "ChainMetaModelSQP", + "MemeticDE", + "MetaModel", + "RFMetaModel", + "MetaModelDE", + "RFMetaModelDE", + ] functions = [ ArtificialFunction(name, block_dimension=bd) for name in names @@ -631,10 +796,15 @@ def bonnans(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: optims = [ "RotatedTwoPointsDE", "DiscreteLenglerOnePlusOne", + "DiscreteLengler2OnePlusOne", + "DiscreteLengler3OnePlusOne", + "DiscreteLenglerHalfOnePlusOne", + "DiscreteLenglerFourthOnePlusOne", "PortfolioDiscreteOnePlusOne", "FastGADiscreteOnePlusOne", "DiscreteDoerrOnePlusOne", "DiscreteBSOOnePlusOne", + "DiscreteOnePlusOne", "AdaptiveDiscreteOnePlusOne", "GeneticDE", "DE", @@ -645,14 +815,20 @@ def bonnans(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: "MetaModel", "DiagonalCMA", ] + optims = ["RFMetaModelOnePlusOne"] + optims = ["MemeticDE", "cGA", "DoubleFastGADiscreteOnePlusOne", "FastGADiscreteOnePlusOne"] for i in range(21): bonnans = corefuncs.BonnansFunction(index=i) for optim in optims: + instrum_str = "TransitionChoice" if "Discrete" in optim else "Softmax" dfunc = ExperimentFunction( bonnans, - instrum.set_name("bitmap") if "Discrete" in optim else softmax_instrum.set_name("softmax"), + instrum.set_name("") if instrum_str == "TransitionChoice" else softmax_instrum.set_name(""), ) - for budget in [20, 50, 100]: + dfunc.add_descriptors(index=i) + dfunc.add_descriptors(instrum_str=instrum_str) + # dfunc._descriptors = {'index': i} + for budget in [20, 30, 40, 50, 60, 70, 80, 90, 100]: yield Experiment(dfunc, optim, num_workers=1, budget=budget, seed=next(seedg)) @@ -708,7 +884,20 @@ def yabbob( noise_level = 0 # Choosing the list of optimizers. 
- optims: tp.List[str] = get_optimizers("competitive", seed=next(seedg)) # type: ignore + # optims: tp.List[str] = get_optimizers("competitive", seed=next(seedg)) # type: ignore + optims = [ + "OnePlusOne", + "MetaModel", + "CMA", + "DE", + "PSO", + "TwoPointsDE", + "RandomSearch", + "ChainMetaModelSQP", + "NeuralMetaModel", + "MetaModelDE", + "MetaModelOnePlusOne", + ] if noise: optims += ["TBPSA", "SQP", "NoisyDiscreteOnePlusOne"] if hd: @@ -716,13 +905,88 @@ def yabbob( optims += get_optimizers("splitters", seed=next(seedg)) # type: ignore if hd and small: - optims = ["BO", "CMA", "PSO", "DE"] - + optims += ["BO", "PCABO", "CMA", "PSO", "DE"] + if small and not hd: + optims += ["PCABO", "BO", "Cobyla"] + optims = [ + "MetaModelDE", + "MetaModelOnePlusOne", + "OnePlusOne", + "ChainMetaModelSQP", + "RFMetaModel", + "RFMetaModelDE", + ] # if bounded: # optims = ["BO", "PCABO", "BayesOptimBO", "CMA", "PSO", "DE"] # if box: # optims = ["DiagonalCMA", "Cobyla", "NGOpt16", "NGOpt15", "CMandAS2", "OnePlusOne"] # List of objective functions. + optims = [ + "MetaModelDE", + "NeuralMetaModelDE", + "SVMMetaModelDE", + "RFMetaModelDE", + "MetaModelTwoPointsDE", + "NeuralMetaModelTwoPointsDE", + "SVMMetaModelTwoPointsDE", + "RFMetaModelTwoPointsDE", + "GeneticDE", + ] + optims = ["LargeCMA", "TinyCMA", "OldCMA", "MicroCMA"] + optims = ["BFGS", "LBFGSB"] + optims = get_optimizers("oneshot", seed=next(seedg)) # type: ignore + optims = [ + "MetaTuneRecentering", + "MetaRecentering", + "HullCenterHullAvgCauchyScrHammersleySearch", + "LHSSearch", + "LHSCauchySearch", + ] + optims = [ + "BFGS", + "LBFGSB", + "MicroCMA", + "RandomSearch", + "NoisyDiscreteOnePlusOne", + "TBPSA", + "TinyCMA", + "CMA", + "ChainMetaModelSQP", + "OnePlusOne", + "MetaModel", + "RFMetaModel", + "DE", + ] + optims = ["NGOpt", "NGOptRW"] + optims = ["QrDE", "QODE", "LhsDE"] + optims = ["NGOptRW"] + if noise: + optims = [ + # "MicroCMA", + # "TinyCMA", + # "SQP", + # "NoisyDiscreteOnePlusOne", + # "TBPSA", + # "RecombiningOptimisticNoisyDiscreteOnePlusOne", + # + # "CMA", + # "TinyCMA", + # "LPCMA", + # "VLPCMA", + # "MetaTuneRecentering", + # "MetaRecentering", + # "SPSA", + # "TinySQP", + # "MicroSQP", + # "TinySPSA", + # "MetaModel", + # "RFMetaModel", + # "RFMetaModelOnePlusOne", + "NoisyOnePlusOne", + # "MicroSPSA", + ] + else: + optims = ["MetaModelPSO", "RFMetaModelPSO", "SVMMetaModelPSO"] functions = [ ArtificialFunction( name, @@ -737,7 +1001,13 @@ def yabbob( for rotation in [True, False] for num_blocks in ([1] if not split else [7, 12]) for d in ( - [100, 1000, 3000] if hd else ([2, 5, 10, 15] if tuning else ([40] if bounded else [2, 10, 50])) + [100, 1000, 3000] + if hd + else ( + [2, 5, 10, 15] + if tuning + else ([40] if bounded else ([2, 3, 5, 10, 15, 20, 50] if noise else [2, 10, 50])) + ) ) ] @@ -799,7 +1069,7 @@ def f(x): budgets = ( [40000, 80000, 160000, 320000] if (big and not noise) - else ([50, 200, 800, 3200, 12800] if not noise else [3200, 12800]) + else ([50, 200, 800, 3200, 12800] if not noise else [3200, 12800, 51200, 102400]) ) if small and not noise: budgets = [10, 20, 40] @@ -1100,6 +1370,11 @@ def pbbob(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: "NGOptRW", "NGOpt", ] + optims = ["ChainMetaModelSQP", "MetaModelOnePlusOne", "MetaModelDE"] + optims = ["LargeCMA", "TinyCMA", "OldCMA", "MicroCMA"] + optims = ["BFGS", "LBFGSB", "MemeticDE"] + optims = ["QrDE", "QODE", "LhsDE", "NGOpt", "NGOptRW"] + optims = ["TinyCMA", "QODE", "MetaModelOnePlusOne", "LhsDE", "TinyLhsDE", "TinyQODE"] dims = 
[40, 20] functions = [ ArtificialFunction(name, block_dimension=d, rotation=rotation, expo=expo) @@ -1364,6 +1639,8 @@ def spsa_benchmark(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: """ seedg = create_seed_generator(seed) optims: tp.List[str] = get_optimizers("spsa", seed=next(seedg)) # type: ignore + optims += ["CMA", "OnePlusOne", "DE", "PSO"] + optims = ["SQP", "NoisyDiscreteOnePlusOne", "NoisyBandit"] for budget in [500, 1000, 2000, 4000, 8000, 16000, 32000, 64000, 128000]: for optim in optims: for rotation in [True, False]: @@ -1442,6 +1719,7 @@ def aquacrop_fao(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: funcs = [NgAquacrop(i, 300.0 + 150.0 * np.cos(i)) for i in range(3, 7)] seedg = create_seed_generator(seed) optims = get_optimizers("basics", seed=next(seedg)) + optims = ["BFGS", "LBFGSB", "MemeticDE"] for budget in [25, 50, 100, 200, 400, 800, 1600]: for num_workers in [1, 30]: if num_workers < budget: @@ -1458,6 +1736,7 @@ def fishing(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: funcs = [OptimizeFish(i) for i in [17, 35, 52, 70, 88, 105]] seedg = create_seed_generator(seed) optims = get_optimizers("basics", seed=next(seedg)) + optims += ["NGOpt", "NGOptRW", "ChainMetaModelSQP"] for budget in [25, 50, 100, 200, 400, 800, 1600]: for algo in optims: for fu in funcs: @@ -1474,6 +1753,11 @@ def rocket(seed: tp.Optional[int] = None, seq: bool = False) -> tp.Iterator[Expe funcs = [Rocket(i) for i in range(17)] seedg = create_seed_generator(seed) optims = get_optimizers("basics", seed=next(seedg)) + optims += ["NGOpt", "NGOptRW", "ChainMetaModelSQP"] + optims = ["BFGS", "LBFGSB", "MemeticDE"] + optims = ["CMA", "PSO", "QODE", "QRDE", "MetaModelPSO"] + if seq: + optims += ["BFGS", "LBFGSB", "MemeticDE"] for budget in [25, 50, 100, 200, 400, 800, 1600]: for num_workers in [1] if seq else [1, 30]: if num_workers < budget: @@ -1541,7 +1825,7 @@ def control_problem(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: f.parametrization.freeze() funcs2.append(f) optims = get_optimizers("basics") - + optims = ["NGOpt", "PSO", "CMA"] for budget in [50, 75, 100, 150, 200, 250, 300, 400, 500, 1000, 3000, 5000, 8000, 16000, 32000, 64000]: for algo in optims: for fu in funcs2: @@ -1568,7 +1852,7 @@ def neuro_control_problem(seed: tp.Optional[int] = None) -> tp.Iterator[Experime ] optims = ["CMA", "NGOpt4", "DiagonalCMA", "NGOpt8", "MetaModel", "ChainCMAPowell"] - + optims = ["NGOpt", "CMA", "PSO"] for budget in [50, 500, 5000, 10000, 20000, 35000, 50000, 100000, 200000]: for algo in optims: for fu in funcs: @@ -1591,6 +1875,7 @@ def olympus_surfaces(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: seedg = create_seed_generator(seed) optims = get_optimizers("basics", "noisy", seed=next(seedg)) + optims = ["NGOpt", "CMA"] for budget in [25, 50, 100, 200, 400, 800, 1600, 3200, 6400, 12800, 25600]: for num_workers in [1]: # , 10, 100]: if num_workers < budget: @@ -1613,6 +1898,7 @@ def olympus_emulators(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: seedg = create_seed_generator(seed) optims = get_optimizers("basics", "noisy", seed=next(seedg)) + optims = ["NGOpt", "CMA"] for budget in [25, 50, 100, 200, 400, 800, 1600, 3200, 6400, 12800, 25600]: for num_workers in [1]: # , 10, 100]: if num_workers < budget: @@ -1762,6 +2048,8 @@ def mldakmeans(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: ] seedg = create_seed_generator(seed) optims = get_optimizers("splitters", "progressive", seed=next(seedg)) + optims += ["DE", "CMA", 
"PSO", "TwoPointsDE", "RandomSearch"] + optims = ["QODE", "QRDE"] for budget in [1000, 10000]: for num_workers in [1, 10, 100]: if num_workers < budget: @@ -2016,7 +2304,16 @@ def double_o_seven(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: } env = base_env.with_agent(player_0=random_agent).as_single_agent() dde = ng.optimizers.DifferentialEvolution(crossover="dimension").set_name("DiscreteDE") - optimizers: tp.List[tp.Any] = ["PSO", dde, "MetaTuneRecentering", "DiagonalCMA"] + optimizers: tp.List[tp.Any] = [ + "PSO", + dde, + "MetaTuneRecentering", + "DiagonalCMA", + "TBPSA", + "SPSA", + "RecombiningOptimisticNoisyDiscreteOnePlusOne", + "MetaModelPSO", + ] for num_repetitions in [1, 10, 100]: for archi in ["mono", "multi"]: for optim in optimizers: @@ -2056,6 +2353,8 @@ def multiobjective_example( ), ] optims += ["DiscreteOnePlusOne", "DiscreteLenglerOnePlusOne"] + optims = ["PymooNSGA2", "PymooBatchNSGA2", "LPCMA", "VLPCMA", "CMA"] + optims = ["LPCMA", "VLPCMA", "CMA"] popsizes = [20, 40, 80] optims += [ ng.families.EvolutionStrategy( @@ -2166,14 +2465,47 @@ def far_optimum_es(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: @registry.register -def photonics(seed: tp.Optional[int] = None, as_tuple: bool = False) -> tp.Iterator[Experiment]: +def photonics( + seed: tp.Optional[int] = None, + as_tuple: bool = False, + small: bool = False, + ultrasmall: bool = False, +) -> tp.Iterator[Experiment]: """Too small for being interesting: Bragg mirror + Chirped + Morpho butterfly.""" seedg = create_seed_generator(seed) + divider = 2 if small else 1 + if ultrasmall: + divider = 4 optims = get_optimizers("es", "basics", "splitters", seed=next(seedg)) # type: ignore + optims = [ + "MemeticDE", + "PSO", + "DE", + "CMA", + "OnePlusOne", + "TwoPointsDE", + "GeneticDE", + "ChainMetaModelSQP", + "MetaModelDE", + "SVMMetaModelDE", + "RFMetaModelDE", + "BFGS", + "LBFGSB", + ] + optims = ["QrDE", "QODE", "RFMetaModelDE"] for method in ["clipping", "tanh"]: # , "arctan"]: - for name in ["bragg", "chirped", "morpho", "cf_photosic_realistic", "cf_photosic_reference"]: - func = Photonics(name, 60 if name == "morpho" else 80, bounding_method=method, as_tuple=as_tuple) - for budget in [1e3, 1e4, 1e5, 1e6]: + for name in ( + ["bragg"] + if ultrasmall + else ["bragg", "chirped", "morpho", "cf_photosic_realistic", "cf_photosic_reference"] + ): + func = Photonics( + name, + 4 * ((60 // divider) // 4) if name == "morpho" else 80 // divider, + bounding_method=method, + as_tuple=as_tuple, + ) + for budget in [1e1, 1e2, 1e3]: for algo in optims: xp = Experiment(func, algo, int(budget), num_workers=1, seed=next(seedg)) if not xp.is_incoherent: @@ -2186,6 +2518,30 @@ def photonics2(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: return photonics(seed, as_tuple=True) +@registry.register +def ultrasmall_photonics(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: + """Counterpart of yabbob with higher dimensions.""" + return photonics(seed, as_tuple=False, small=True, ultrasmall=True) + + +@registry.register +def ultrasmall_photonics2(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: + """Counterpart of yabbob with higher dimensions.""" + return photonics(seed, as_tuple=True, small=True, ultrasmall=True) + + +@registry.register +def small_photonics(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: + """Counterpart of yabbob with higher dimensions.""" + return photonics(seed, as_tuple=False, small=True) + + +@registry.register +def small_photonics2(seed: 
tp.Optional[int] = None) -> tp.Iterator[Experiment]: + """Counterpart of yabbob with higher dimensions.""" + return photonics(seed, as_tuple=True, small=True) + + @registry.register def adversarial_attack(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]: """Pretrained ResNes50 under black-box attacked. @@ -2246,9 +2602,10 @@ def pbo_suite(seed: tp.Optional[int] = None, reduced: bool = False) -> tp.Iterat index += 1 if reduced and index % 13: continue - for instrumentation in ["Unordered"] if reduced else ["Softmax", "Ordered", "Unordered"]: + for instrumentation in ["Softmax", "Ordered", "Unordered"]: try: func = iohprofiler.PBOFunction(fid, iid, dim, instrumentation=instrumentation) + func.add_descriptors(instrum_str=instrumentation) except ModuleNotFoundError as e: raise fbase.UnsupportedExperiment("IOHexperimenter needs to be installed") from e for optim in list_optims: diff --git a/nevergrad/benchmark/exporttable.py b/nevergrad/benchmark/exporttable.py index c5c118d22a..edc628dc59 100644 --- a/nevergrad/benchmark/exporttable.py +++ b/nevergrad/benchmark/exporttable.py @@ -46,7 +46,8 @@ def export_table(filename: str, rows: t.List[t.Any], cols: t.List[t.Any], data: f.write("\\lccode`7=`7\n") f.write("\\lccode`8=`8\n") f.write("\\lccode`9=`9\n") - f.write(r"\\newcolumntype{P}[1]{>{\hspace{0pt}}p{#1}}\n") + f.write(r"\newcolumntype{P}[1]{>{\hspace{0pt}}p{#1}}") + f.write("\n") f.write("\\begin{document}\n") f.write("\\scriptsize\n") f.write("\\renewcommand{\\arraystretch}{1.5}\n") diff --git a/nevergrad/benchmark/optgroups.py b/nevergrad/benchmark/optgroups.py index 72801d4866..e02aed2751 100644 --- a/nevergrad/benchmark/optgroups.py +++ b/nevergrad/benchmark/optgroups.py @@ -45,9 +45,6 @@ def get_optimizers(*names: str, seed: tp.Optional[int] = None) -> tp.List[Optim] @registry.register def large() -> tp.Sequence[Optim]: return [ - "NGO", - "Shiwa", - "DiagonalCMA", "CMA", "PSO", "DE", diff --git a/nevergrad/benchmark/plotting.py b/nevergrad/benchmark/plotting.py index a95f6cf43a..51b2cc26c6 100644 --- a/nevergrad/benchmark/plotting.py +++ b/nevergrad/benchmark/plotting.py @@ -26,6 +26,7 @@ _DPI = 250 +pure_algorithms = [] # %% Basic tools @@ -256,7 +257,14 @@ def create_plots( "block_dimension", "num_objectives", ): - df[col] = df[col].astype(float).astype(int) + try: + df[col] = df[col].astype(float).astype(int) + except Exception as e1: + try: + for i in range(len(df[col])): + float(df[col][i]) + except Exception as e2: + assert False, f"Fails at row {i+2}, Exceptions: {e1}, {e2}" elif col != "loss": df[col] = df[col].astype(str) df[col] = df[col].replace(r"\.[0]*$", "", regex=True) @@ -382,6 +390,8 @@ def create_plots( with open(str(output_folder / name) + ".cp.txt", "w") as f: f.write(fullname) f.write("ranking:\n") + global pure_algorithms + pure_algorithms = list(data_df.columns[:]) for i, algo in enumerate(data_df.columns[:58]): f.write(f" algo {i}: {algo}\n") if name == "fight_all.png": @@ -405,7 +415,9 @@ def create_plots( # Average normalized plot with everything. out_filepath = output_folder / "xpresults_all.png" data = XpPlotter.make_data(df, normalized_loss=True) - xpplotter = XpPlotter(data, title=os.path.basename(output_folder), name_style=name_style, xaxis=xpaxis) + xpplotter = XpPlotter( + data, title=os.path.basename(output_folder), name_style=name_style, xaxis=xpaxis, pure_only=True + ) xpplotter.save(out_filepath) # Now one xp plot per case. 
for case in cases: @@ -479,6 +491,7 @@ def __init__( title: str, name_style: tp.Optional[tp.Dict[str, tp.Any]] = None, xaxis: str = "budget", + pure_only: bool = False, ) -> None: if name_style is None: name_style = NameStyle() @@ -491,6 +504,13 @@ def __init__( # plot from best to worst lowerbound = np.inf sorted_optimizers = sorted(optim_vals, key=lambda x: optim_vals[x]["loss"][-1], reverse=True) + if pure_only: + assert len(pure_algorithms) > 0 + # print(sorted_optimizers, " merged with ", pure_algorithms) + sorted_optimizers = [ + o for o in sorted_optimizers if o + " " in [p[: (len(o) + 1)] for p in pure_algorithms] + ] + # print("Leads to ", sorted_optimizers) self._fig = plt.figure() self._ax = self._fig.add_subplot(111) # use log plot? yes, if no negative value diff --git a/nevergrad/functions/base.py b/nevergrad/functions/base.py index eebed0c61a..6e3a251179 100644 --- a/nevergrad/functions/base.py +++ b/nevergrad/functions/base.py @@ -65,6 +65,11 @@ def __new__(cls: tp.Type[EF], *args: tp.Any, **kwargs: tp.Any) -> EF: inst._descriptors = { x: y for x, y in callargs.items() if isinstance(y, (str, tuple, int, float, bool)) } + # if "bonnans" in str(cls.__name__) or "discrete" in str(cls.__name__) or "pbo" in str(cls.__name__): + # inst._descriptors = { + # x: y for x, y in callargs.items() if isinstance(y, (str, tuple, int, float, bool)) and "dimension" not + # in x and "paramet" not in x + # } inst._descriptors["function_class"] = cls.__name__ return inst # type: ignore diff --git a/nevergrad/functions/images/imagelosses.py b/nevergrad/functions/images/imagelosses.py index c92d6959c9..7e3ecfdacb 100644 --- a/nevergrad/functions/images/imagelosses.py +++ b/nevergrad/functions/images/imagelosses.py @@ -143,7 +143,7 @@ def __call__(self, img: np.ndarray) -> float: assert img.shape[2] == 3 assert len(img.shape) == 3 img = np.asarray(img, dtype=np.float64) - return -float(cv2.Laplacian(img, cv2.CV_64F).var()) + return -float(cv2.Laplacian(img, cv2.CV_64F).var()) # type: ignore @registry.register diff --git a/nevergrad/functions/leaderboard.csv b/nevergrad/functions/leaderboard.csv index 2d4a9ff353..0627b3cdc9 100644 --- a/nevergrad/functions/leaderboard.csv +++ b/nevergrad/functions/leaderboard.csv @@ -1,25 +1,45 @@ ,loss,array +087907803798373,599.9999999994982,"0.9488701515776654]""" +"6654]""",, "arcoating,10,400",8.460052234911,"[3.5078401975334836,2.1313636423967908,5.74820620293398,5.8487451286666765,4.8545109985384265,3.4824877261243303,5.260839045029202,2.5405692757032448,6.3167358091135775,3.9099033401163568]" "arcoating,35,700",10.692182923879,"[2.6438666218109157,2.8264535192990303,1.4644717232196065,1.3868690833200255,5.359631966189977,3.1794520866189395,3.7842831452895944,5.036924894535986,1.962600321947623,8.41130931768094,3.433023356743832,4.331452421465162,4.767654438750657,3.8507206829026672,2.324004019122509,6.9649988590088485,4.868708512859612,3.422629321592912,8.940128097193993,1.3983849733094373,5.3559599333124925,6.008318242225357,6.787042128161735,3.14502009453323,1.7507149596938647,5.823179835229929,3.885850430482339,1.491871315262994,5.650931351579339,5.648724887342128,6.812382941543163,4.0181619570133496,3.1819259775435844,7.893933874893283,7.199368100107198]" 
"arcoating,40,400",0.093393072709,"[3.0732156068201593,1.229052862896116,2.753060560102575,1.0000726953581218,1.0746483434661949,1.0114731725809747,1.1666097505189255,1.1239332667742468,2.438826515244326,4.223835950411204,1.9585945324494896,7.903478226209001,1.0017935251701398,1.63289560241918,4.4707203748693125,1.5482324594259556,7.818950641765108,8.40109295866299,1.6348515533524308,8.964556236383432,5.9053153613790785,5.959173124579851,4.811447909285858,6.029278959615834,7.7583587040317665,7.135275955147948,7.257480334081463,1.3814926964170782,6.408411364779704,7.667793098590952,6.467284553444443,3.022378328005887,6.9552775844113945,8.701871535508106,3.732795643328979,7.720812252384855,5.980513088233799,8.307109956365855,8.835064723333073,6.751327908180106]" "arcoating,70,1000",16.343215903293,"[5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0]" "arcoating,8,400",13.100717405397,"[5.0,6.333333333333333,7.666666666666666,9.0,9.0,9.0,9.0,9.0]" "bragg,16",0.932163816851,"[2.5,2.5,2.5,2.5,2.5,2.5,2.5,2.5,2.5,2.5,2.5,2.5,2.5,2.5,2.5,2.5]" +"bragg,20",0.26370031872,"[2.998364626753805,2.0001074108077637,2.9986821208836005,2.000361096693583,2.9991989962548136,2.0000590667127245,2.999860481380005,2.0013110227884896,2.997941154051798,2.0015364688748907,87.13358829971995,108.0771874084705,84.65750087658841,108.629459799926,84.6762463036726,105.8421510108027,86.72960407519366,106.6075140436643,87.29576052145039,105.30171670965183]" +"bragg,40",0.060042569169,"[3.0,2.0,3.0,2.0,2.9921018727896174,2.556185653707756,2.0,3.0,2.0,3.0,2.0,3.0,2.0,2.78089812089261,3.0,2.0,3.0,2.0,3.0,2.0,81.85048648120221,103.40845081696078,91.03041653572016,100.89528443524628,104.3789260959985,167.89870773566335,116.69132888282667,82.91546169078914,107.12477772036188,87.30165748774004,105.99606480281693,85.44843088843211,107.68209478044534,180.0,90.79404105608135,107.26309208762166,85.10052110978467,102.76832976413321,86.35727350241352,107.02623793814124]" "bragg,80",0.000694261986,"[2.999999999999995,2.0,2.9999999999999982,2.0,2.9999999999999942,2.0000000000000204,2.9999999999999982,2.0000000000000204,2.999999999999997,2.0000000000000577,2.999999999999994,2.0000000000000178,2.9999999999999996,2.0000000000000258,2.9999999999999956,2.0,3.0,2.0,3.0,2.0000000000000258,2.9999999999998677,2.0,2.999999999999998,2.0000000000000524,2.999999999999998,2.0,2.999999999999731,2.0000000000000204,3.0,2.0,3.0,2.0,2.999999999999869,2.0000000000000524,2.999999999999998,2.0,2.999999999999731,2.0,2.9999999999999982,2.0,86.60255045244762,106.06599965930195,86.60255443305651,106.06600747129924,86.60257272790835,106.06594450536937,86.60261007043326,106.06596664220048,86.60256991846003,106.06601083817202,86.60251126812021,106.06605239950612,86.60252027566017,106.06598075034444,86.60259101280616,106.06599907105759,86.60256520284221,106.06597421940252,86.60257498882078,106.06598483902887,86.60258117175987,106.06597850626063,86.60257221703684,106.06597517269579,86.60257273903393,106.06600072357362,86.60255405767539,106.06597375534662,86.60256934296459,106.06596027660167,86.60259348834224,106.06598724294983,86.60256638431304,106.06597523757073,86.60257261435468,106.06600050328808,86.60255444923789,106.06596589510059,86.60257650073991,106.06599053476317]" 
-"bragg_as_tuple,80",0.040225964232,"[3.0,104.22575393152347,2.0,104.63209813326603,3.0,109.85745845151739,2.368905957545531,107.28473480141925,2.0,104.98063405099367,3.0,106.66132870204113,2.0,107.36834852091465,2.0,104.96146001749587,3.0,106.17286258956503,3.0,108.31244650941055,3.0,101.40314345649449,3.0,102.2635542128241,2.0,105.94418516460463,2.873277637801231,105.82537507400967,2.0,107.65130878698021,3.0,103.77323491104285,3.0,105.00045324824586,2.174024543417391,107.36325567360004,3.0,105.78354883573766,2.0,106.61225455869791,3.0,104.02141960118333,2.0,104.45388929235855,2.0,103.87595516741028,3.0,107.80538741150086,2.0,102.79943690884136,2.0,105.76107783008263,2.0,101.38803969565154,3.0,106.15321840689721,3.0,106.23176984984664,3.0,105.58192095050462,2.0,103.01763572764182,3.0,104.76940586737547,2.0,106.50924794354795,2.0,107.56452098004928,3.0,104.45286786710051,2.0,101.98789496656163,2.0,107.34754586858566,3.0,105.41545904152017,2.783290555499425,103.53257158512108,2.6118006586467475,102.8599228772668]" +"bragg_as_tuple,20",0.0010551854360000001,"[2.96990658846803,179.99966546149128,2.998902264319352,30.000241270329738,2.8265591075008514,179.99855339763468,2.0330863007122586,30.0075468811403,2.729187129693892,179.99997601273367,2.0864372478250535,101.79937826535131,2.157987763145396,179.9731762612071,2.9999305257329834,169.45231739088968,2.990478427835228,131.29092080107694,2.0000209948939642,168.1123248736521]" +"bragg_as_tuple,40",5.5972377e-05,"[2.948532392340755,180.0,3.0,30.0,3.0,131.06741701601484,2.767549191280395,30.5420401371438,3.0,179.65393533792871,2.7442402300284474,30.0,2.0966015662381756,161.99141733419847,2.953488425364594,30.0,2.016551585806329,180.0,3.0,179.99999998383626,2.292308532321335,32.51293698396886,2.189400571019735,180.0,2.149885114318265,121.61754339742575,2.0077928484103507,179.95644814413401,2.9999994034898143,102.23814079927654,3.0,180.0,3.0,180.0,2.999999840775769,180.0,2.031428198020951,180.0,2.9992244152448473,123.81355946097594]" +"bragg_as_tuple,80",2.45365e-07,"[2.9549331072639724,158.75621656323568,2.0480044357819165,30.0,2.0,55.27628613454472,3.0,67.23237818684427,3.0,30.0,2.9999989607387887,180.0,3.0,30.0,2.0,180.0,2.0,30.0,3.0,119.67526617340124,3.0,30.0,3.0,179.99990709110386,2.0,30.0,2.242564929138231,180.0,2.0000000000001,30.0,2.9995932244962082,47.56425920994913,2.0,30.0,2.0,180.0,2.0,30.00714481618614,2.9848087851737124,150.86140143154492,2.0130071645286125,180.0,3.0,180.0,2.4383203911775704,30.0,2.9998437592268403,30.00062024271962,2.0,30.0,2.121112606913359,30.000000000804718,3.0,30.0,3.0,30.0,3.0,30.0,2.974233005367781,180.0,2.707185961252868,180.0,2.0,123.13187888743329,2.0000004584930373,81.98162132924631,2.9975818344303513,30.0,2.0,180.0,3.0,30.0,2.0,30.0,3.0,144.52320172542886,3.0,30.0,2.0,179.99993209998826]" "cf_photosic_realistic,20",0.0860257425449999,"[2.0,3.0,2.1076,2.0,3.0,2.5783,2.0,3.0,2.0,3.0,90.0231,78.9789,72.8369,99.9577,82.7487,62.7583,104.1682,139.9002,93.3356,75.6039]" 
+"cf_photosic_realistic,40",0.006834812177000001,"[1.8448076337942485,1.5752964234856326,2.9031806811402845,4.274856113986125,5.930077195785369,5.118296006594735,4.5661298813336115,5.336743015564694,4.8751004072893025,3.3365200126208445,2.3808103158606047,1.778089786945824,2.4152195567556576,1.6776162044329448,3.618436080840265,6.795642693396257,6.182464813823576,5.0328064801025585,2.621393263573584,7.61214056900606,40.50110813461771,35.15466194674434,30.66683659316608,47.86180197526095,167.6582645007217,42.30938245971965,91.69534538260075,41.35380130989944,64.27253585507262,61.78587741879744,58.52113117825871,171.73235033539623,144.14783191364023,174.52584533984646,63.57125873977025,113.59466811951472,36.93988807988892,48.15876779055445,162.1563880600043,45.57703788786838]" +"cf_photosic_realistic,80",0.011704014979999998,"[1.5354759424912032,3.7634283854949064,7.78055303945254,5.419009506164515,5.887505417790904,5.300327441401545,4.693285717949118,5.276400402425426,4.147953513561486,5.442226639500226,5.058313416220825,6.329593225605656,5.309365878301927,5.357638072367861,5.404854028026162,6.454853724025947,5.199665376183837,5.324646998652084,4.678629289422255,4.782255897667831,4.487306261881625,4.400458418783036,4.326756334112336,4.548763117259155,4.587200893804092,4.883841853734641,4.603366853105259,5.3918143230702835,4.84972133207243,4.686226787052884,5.2537087624393415,4.065915062434549,6.038037463134169,3.8776073040942354,5.606534847884856,4.859561195312973,4.447980485127308,4.946818321073501,4.052893773250138,8.080498848247768,114.35575173077771,67.18182512728241,95.34537711525937,99.12046541353337,98.1765822651763,73.36892584266718,119.20393211851366,98.81014955886698,120.39017966600399,89.06991537834554,86.29833751834202,95.80948667011639,85.69154339261523,65.07678408915123,80.69654401384784,96.33240855344756,83.11648895958243,105.62899287533395,122.53605144314626,94.81006597422127,100.85484935205464,101.51520718936922,96.8402283546034,110.60545941550778,69.46675675126359,94.93649649931615,100.63181845019137,109.88754207089882,123.44771989877177,98.23001404277618,93.4252830975658,114.80007609127351,98.76244219146753,125.53766468122589,105.26360561494576,94.67865828463431,121.14819160950388,78.22860163450872,120.72917887507884,44.409759060224054]" +"cf_photosic_realistic_as_tuple,40",0.348172807944,"[1.0016922423071986,30.077992408849965,8.996212944794262,173.62108466874264,9.0,39.11309181551697,8.972365568065815,30.772436561525893,1.5285333768055782,30.000198836608917,3.5626235577354253,31.582563789359682,2.709344855458358,30.001112055765105,1.3190462866510155,30.39185869049585,1.2129765787319151,38.68841939012668,1.0000143886280801,30.00000044034563,8.368891217008336,30.00000000000061,1.0078842676334916,38.504478451856215,1.0000000015059185,30.019317610268388,8.999181412786786,30.000017931320897,2.2280626868846514,30.0002005920108,1.0277153512797597,30.00000000057625,1.0045818032112317,30.000635312294676,8.816749567564345,154.33909209731098,1.042655780051407,30.00526784492037,1.0183047119242734,147.9313838866714]" 
+"cf_photosic_realistic_as_tuple,80",0.40416977290899997,"[9.0,30.0,1.0004533379699811,153.81646404402713,9.0,78.72937421676897,5.9438720375770595,67.3132266615084,1.1217971357425833,98.98758721772973,1.0,86.4013458172307,1.522779937944145,124.7981352984252,1.0,30.0,1.0306189608156928,30.0,1.1868625360678497,167.06791550951903,9.0,180.0,1.0,180.0,9.0,180.0,8.22024890870132,180.0,1.0,180.0,1.0,180.0,1.0000145660471578,112.15013346953317,7.982797290869866,63.902010487372685,1.0051665533253256,46.238094022127996,2.0881739185199404,30.0,1.0,30.0,1.0,160.84225048904779,1.0,180.0,1.0,81.458388701988,1.7381504155696645,179.99917387231125,1.0,123.19052829814461,2.2901196519685083,30.000271428929608,1.0,32.129323023030466,9.0,30.0,1.0000003231782753,31.747439327107273,1.0,180.0,1.04681878490213,30.91215521011327,1.000000012368941,180.0,1.0,180.0,1.0000000000000004,139.9651585891046,1.0,168.42874914307842,1.0559553354291356,44.86481390371662,1.0000228988083206,180.0,1.0,30.0,1.0,30.0]" "cf_photosic_reference,16",0.431072337632,"[109.72985831489979,98.18574120560555,107.42439496690479,87.99264365961669,112.531428339492,89.65278659791956,105.05127078132273,103.79772329843809,96.93018121427048,133.71819394988918,99.02177080593655,109.72456993535758,115.95956118008563,92.84831198907784,118.42356371437981,103.77850212331678]" +"cf_photosic_reference,40",0.084939213136,"[81.39742494198082,101.44119728571083,158.0140104960585,121.6790534278067,145.81503548139966,122.09136794482608,143.2159642351609,120.28581189218119,143.68590105131614,123.17515218812616,135.78132612127558,121.49622264626781,140.50868649530034,119.10617900712968,142.73884544725277,118.05933430040943,141.68861960213334,118.2153522246694,140.29356028063077,118.12015221425766,142.80319825316133,119.70336315420906,141.9642808224119,120.41009879300351,148.86542372088152,119.20766379192047,156.74457524664513,128.2412139084457,180.0,151.44913736453114,180.0,134.311573138848,159.5878954063337,117.87077801701192,156.02745207576675,119.08396522657013,140.79077419270777,130.885157037691,90.38456262594073,75.81598303665277]" +"cf_photosic_reference,80",0.165955044925,"[125.4884376485969,31.43727941883506,176.81212680274388,163.64673359279863,30.0,30.0,30.0,71.56336730735535,30.0,30.0,60.132281235779,175.63036573239486,180.0,31.23975421957407,46.60278764515358,52.22319313678755,30.0,33.95360490313314,30.0,30.0,34.81114395503347,175.37126668418534,178.30316182499982,179.03557619632215,43.038612007695384,175.26482763106216,174.69345429657795,30.0,32.87217264940138,170.58395850009873,30.577092835567512,30.0,98.28872399072498,32.897365327420765,34.67421888315457,95.94717292400826,176.54174484362667,158.96194700502335,180.0,56.8021373584387,30.0,67.51086610415719,30.0,168.37571929336832,163.1834829467999,122.01267648173253,180.0,174.71085052869373,30.0,166.82793878846667,146.45048983362932,170.78674243606304,179.49135953574967,53.97384420929961,35.67944571001503,43.75630407024304,41.21954307574285,30.407798095659107,35.183188344660245,118.29440868693369,30.495811828657,176.3255931763546,175.70714392505408,163.89737672330656,62.62171370749238,179.56798525398935,179.80535183884012,30.0,32.11571708368048,156.34484056441445,37.784068080962264,177.5140670242546,176.5104064667321,140.59863065132043,141.18777805570252,145.97405030126149,151.29228373401608,129.87013695902078,46.98828162634734,95.16328465437428]" 
+"cf_photosic_reference_as_tuple,40",0.091297185071,"[86.31374469380421,142.6498696720864,30.949164948234042,174.42145016983812,41.2044204864721,31.81520550422306,37.543370800571594,36.70766292101666,31.026730237242546,32.44140386916378,176.9610860650124,169.26742200144807,32.0118638877245,178.93270473624617,31.283529848133725,162.1471901016077,158.66216050140306,123.59667189683084,177.02402551758075,176.81181753392656,32.67593820810953,31.09431062163982,178.38238368014754,138.24386947653636,170.13027288719655,132.2039426188142,178.80435640903076,156.10020324646115,172.99541835318405,139.90503246193802,153.02815350629,122.70673525123529,169.27386278179637,67.68909273128969,32.67397813714399,32.14883823434653,90.58391571146781,141.17101578808484,97.50130059724017,74.14782206811655]" +"cf_photosic_reference_as_tuple,80",0.165419075745,"[111.95017784796909,30.000000000101494,30.00029250578254,30.000009258589785,30.0,30.000000035181287,44.12050313610742,30.000831314945188,30.00000000003112,30.000000000109893,30.002238641555593,179.99999006917147,179.96112096635318,150.83399822611943,30.0000000000083,175.62758631826,179.99999999999903,30.261412444074807,30.0,179.10923566884648,58.43713264500381,37.590311448160165,55.97945335284986,30.000078146420606,32.17733626861619,180.0,30.00000007522314,171.1520153210928,160.33862032131975,179.97279549113927,30.032863629274644,30.002136623684464,30.0,30.003344734080372,179.50960168724092,178.6794946535474,135.97255681286566,174.40228023403552,30.0,179.99999999976603,179.99999545191224,180.0,30.946859476837034,179.9217236308837,30.25274316336551,30.0,30.134814687271387,172.681479007571,30.00000000014326,30.000000022542523,93.09322034819253,30.448678182495186,30.000311393916363,30.00017490673342,30.012584800651197,30.574528947045962,30.000019171796296,179.99470873318768,179.9735784165456,179.99999999999784,67.64754532991307,30.00034231284326,30.001801885212714,179.99973309685592,30.0,30.01585545132626,179.47733565204243,180.0,30.00000000000503,179.99999999997988,178.40639667113612,175.76100217580503,31.128186209527072,179.99999028858173,179.99979175116965,179.99258080239156,30.000000898482867,180.0,73.54252470493023,91.26996862619501]" "chirped,16",0.594587712739,"[109.72985831489979,98.18574120560555,107.42439496690479,87.99264365961669,112.531428339492,89.65278659791956,105.05127078132273,103.79772329843809,96.93018121427048,133.71819394988918,99.02177080593655,109.72456993535758,115.95956118008563,92.84831198907784,118.42356371437981,103.77850212331678]" "chirped,30",0.275923457222,"[89.04887416,109.54188095,89.74520725,121.81700431,179.99830918,124.38222473,95.31017129,116.0239629,92.92345776,118.06108198,179.99965859,116.89288181,88.90191494,110.30816229,93.11974992,137.42629858,118.81810084,110.74139708,85.15270955,100.9382438,81.44070951,100.6382896,84.97336252,110.59252719,134.89164276,121.84205195,89.28450356,106.72776991,85.77168797,102.33562547]" 
-"chirped,50",0.116563972901,"[93.65223418423034,118.48802644462693,107.4154177701731,146.1273996196957,104.76814425546243,122.46259169233943,101.63070554932264,132.01391635156088,113.23959177552805,134.21191762900222,104.52593631355413,123.84798946548463,99.53290830788647,124.5717299569956,114.84349635591072,136.69596308542052,94.99997481641495,107.55307917381322,82.394994109015,96.18784040922577,79.12042692721471,101.72854687985267,87.61947895483019,118.06040186972581,129.67099220470743,120.08173331893012,89.30208035845052,105.27994144055239,83.02744104990198,98.53738160919845,80.48856203248494,103.55664056758202,92.3091829150373,123.27796260700957,97.2529066260935,110.36369311643149,84.99816155046332,97.87908100788103,75.53207272339498,92.1090256322082,78.09126315339792,98.11980069427835,79.9260762785673,95.27087465581414,75.64339912130325,93.41051297755547,79.35090675442197,100.5778583399821,83.55758649426299,101.84134444518143]" +"chirped,40",0.175295774335,"[102.07652279378851,124.1637139218202,120.33773687239812,119.54544259309108,105.38152860884843,119.88010729325077,122.36316972182254,142.1087444785461,106.34394000276315,122.5164474333425,123.2078985282497,120.35357943039926,95.6003344021036,122.12686144393041,99.28965657196636,131.1436369351425,96.549204573179,107.52779113249292,84.7802544669498,101.75000546171083,84.01349819857245,108.53478012166869,101.82721066537768,127.16757921232485,98.0198295360771,106.25566487110592,83.00500085959983,102.57375107008194,78.18621146580061,90.09428888336491,75.29963556634469,101.08743774950291,80.0533947691719,94.61770194103921,71.98049150906604,96.97208670864934,79.4367014944811,102.61697513442303,82.99850827911939,103.64087357448088]" +"chirped,50",0.11656397290100001,"[93.65223418423034,118.48802644462693,107.4154177701731,146.1273996196957,104.76814425546243,122.46259169233943,101.63070554932264,132.01391635156088,113.23959177552805,134.21191762900222,104.52593631355413,123.84798946548463,99.53290830788647,124.5717299569956,114.84349635591072,136.69596308542052,94.99997481641495,107.55307917381322,82.394994109015,96.18784040922577,79.12042692721471,101.72854687985267,87.61947895483019,118.06040186972581,129.67099220470743,120.08173331893012,89.30208035845052,105.27994144055239,83.02744104990198,98.53738160919845,80.48856203248494,103.55664056758202,92.3091829150373,123.27796260700957,97.2529066260935,110.36369311643149,84.99816155046332,97.87908100788103,75.53207272339498,92.1090256322082,78.09126315339792,98.11980069427835,79.9260762785673,95.27087465581414,75.64339912130325,93.41051297755547,79.35090675442197,100.5778583399821,83.55758649426299,101.84134444518143]" 
"chirped,80",0.032443709031,"[88.52483361145642,108.92603698859098,97.04696158396622,179.9939570600735,94.19642075128272,103.30846616331984,81.29625993675596,95.05063197150761,77.709373680586,100.81610182681413,86.26517388248608,114.3151119519831,118.27692052373264,133.67143913893963,93.5311449270739,112.88478884103904,179.99906128209363,110.96886636925439,87.0525494740198,106.19238243264702,83.45680472650062,95.87480240302271,74.84514445698744,98.03831429770835,88.6130544913638,119.73483873341877,104.48001445731423,138.6323535761068,119.63010391311067,133.8309836804302,100.92157824350483,119.92550379046051,98.78641008123896,125.06765971003176,108.32078089504897,133.42173063233668,108.29941119571218,143.58622375887546,109.98502471825267,122.41478193726267,97.57862293264694,121.49336184447904,112.96592454386476,139.9492761564363,102.26548370710076,123.56636864464592,104.34370920471545,122.48140828139725,90.78171115586403,103.52126551047564,81.50102387202843,99.05592025264554,81.64377285362491,100.70682268909643,82.50875593897791,102.6738651822106,85.42810341360513,110.17382926802803,95.76846125991416,119.89313171852815,97.0419755376557,118.01612116654306,98.50691933568284,124.82813911505264,99.16589049747316,112.32579883953483,86.46250343538372,97.21862569154766,78.13118077960564,96.09801884928629,80.2192268261995,100.62638167590012,81.06326903762309,95.3241238185923,73.49025216866023,90.58651009490646,77.93688016160958,99.68921531581518,81.9810425130764,97.60997762584654]" +"chirped_as_tuple,40",0.17300336994100002,"[83.25625870174649,99.60034146203094,79.5399734942138,94.04020020290376,78.3104345302662,93.12650496474873,75.59416093109921,100.20515470814361,78.92536665391994,93.40087511190403,74.68779637746519,96.22601805255202,84.43373479952646,111.81016986247636,93.8122641092854,131.2146799397565,98.0039082074487,106.4869356621518,88.89775139670527,97.28574635987067,84.75450819292527,104.96407369789627,93.11971401135409,133.54255131197255,99.2592878266979,122.50741744910765,96.40511721651927,123.86775887498993,108.71613713909328,128.73997734603861,122.5699605414561,125.5573211073178,119.17201104011755,126.49238577380295,103.78127229839083,123.68051697173117,116.30732509006339,118.48848264026002,103.22584605431442,116.21725079649002]" 
+"chirped_as_tuple,80",0.055041416508,"[68.79656260139478,138.3985238965868,74.2114838796801,88.63468496280836,87.86067034629835,78.48218971566679,104.11930758310335,55.30897314075829,73.15703830146573,112.93975975188874,69.26239369748525,93.28285837111785,68.77841379622357,101.20606083771371,94.43284681891618,85.43848212538248,91.3304077689215,82.31960969001275,82.61114634310198,113.03955097291566,81.40456642873417,130.62212501796455,112.78501173478371,86.55686051317976,78.2863434147772,99.47586691082415,100.61133694833805,95.2372553061469,104.44207700019577,125.36080492740967,103.10990131703103,86.72377176891413,86.8760875280834,87.09429554082344,71.90777966928312,121.27104662936907,51.81776888288497,106.98439883109823,61.72699587174802,97.52890496933655,76.21732845779854,95.12956792077956,139.83404204795966,151.64848183481857,79.22004125671583,92.2946474454669,98.71162165000067,115.44148984601419,106.59903658441554,92.5769365765226,95.69796245139202,96.90695227034757,137.30465134794517,85.49594743354712,88.76803857635574,117.7414254275201,109.78660733367916,119.25742759089178,110.51723079579679,144.08377603035075,111.78615411583522,108.18463999908266,129.38320818176544,124.79734065211983,133.70223041486128,118.84316261842116,91.78069987050003,155.5552890457952,89.01627062100243,123.40582587060553,114.93391495189795,118.24612188595408,139.72564732191373,118.85132332215584,122.86842852155064,90.97040856698612,149.54413460292238,88.28852778956818,109.30970933403258,121.4628744746955]" "morpho,12",0.861490537019,"[252.1374714961974,106.36158148850492,89.2230430324995,447.5581110741118,66.40029844152743,63.58507516515964,199.35476565886609,361.1488814748131,366.7794730573734,67.77285864245286,112.75173391349733,81.40977079937937]" "morpho,16",1.296884857369,"[150.0,300.0,315.0,150.0,150.0,300.0,315.0,150.0,150.0,300.0,315.0,150.0,150.0,300.0,315.0,150.0]" "morpho,24",0.6984662118260001,"[67.26849677257343,79.24321185258943,84.29061134635378,58.83044583096272,89.66572506678874,86.60888828264055,415.8388734310249,599.8812594909314,351.94128455297187,24.34139102007844,366.79991579172713,583.7215097268089,142.5375766459714,332.92774484926065,298.8202195311825,356.7309670527933,287.1173309233849,404.2807210392618,299.98188714461025,50.225572629250514,285.53191584921916,44.076125606500476,293.1054295993933,30.933276872030664]" +"morpho,28",0.942813055027,"[247.87517591779843,225.0,224.8152446616811,122.9736092015733,114.34449798140076,293.272850948404,238.10548037895484,304.4091010587573,164.43051255916782,208.6956521739131,343.5585935843167,329.03225806451616,0.0,439.3032243540575,186.31115291941794,191.26433269018503,185.50950767024986,234.81355932203385,362.21832874901986,531.4051540529314,363.97183098591546,286.1650796425978,226.91074462607543,300.0,0.0,61.08534950564464,71.469064983831,18.88778747127882]" "morpho,36",0.567957669878,"[100.96536878725058,232.16468960961782,78.85523207361015,214.51223438763753,75.89441727306279,218.40581990210538,76.70459049976964,235.00081098229646,77.78059518483653,264.00128466299293,245.07267831506675,428.4287303804663,266.7856810799451,175.86829483871026,165.63461525208467,465.17445657405034,366.1270507007997,237.522658110795,263.7132091597654,167.32906314397042,357.8902823684377,410.9159102460021,372.02631916119753,258.35300162196523,451.044201067961,441.53945835010575,449.1650939325491,179.39104718198786,88.8271411737769,118.64558898301105,160.53307148993068,89.39389813292529,178.5255698838547,119.09614445773937,96.00413532059109,112.94976196610708]" 
"morpho,4",1.127904740499,"[154.7298583148998,293.18574120560555,317.42439496690474,132.9926436596167]" -"morpho,40",1.328768715877,"[150.0,150.0,150.0,150.0,150.0,150.0,150.0,150.0,150.0,150.0,300.0,300.0,300.0,300.0,300.0,300.0,300.0,300.0,300.0,300.0,315.0,315.0,315.0,315.0,315.0,315.0,315.0,315.0,315.0,315.0,150.0,150.0,150.0,150.0,150.0,150.0,150.0,150.0,150.0,150.0]" +"morpho,40",1.3287687158769999,"[150.0,150.0,150.0,150.0,150.0,150.0,150.0,150.0,150.0,150.0,300.0,300.0,300.0,300.0,300.0,300.0,300.0,300.0,300.0,300.0,315.0,315.0,315.0,315.0,315.0,315.0,315.0,315.0,315.0,315.0,150.0,150.0,150.0,150.0,150.0,150.0,150.0,150.0,150.0,150.0]" "morpho,48",0.534372807012,"[59.33041960751288,99.17042673266693,79.03990949984373,100.09608817250242,215.08536494524782,213.48080383766006,223.38605676295748,62.1877773107194,78.59192259916864,206.57254442847284,100.49504172262408,238.26268084590538,248.08033497215342,229.886747557948,433.3828290545404,251.2175606263238,285.9568063421582,547.58879753913,411.9624924656734,293.409498298531,208.29322551871047,263.62956155288805,191.701242059666,265.6697290883516,227.32598408391584,230.84889396426746,460.1048548025645,315.76739512321615,278.6271526313933,226.4690710457063,268.86615382660784,270.84424195020307,369.5653419525832,303.82934198377154,344.9339446335882,216.1983454359172,150.53249477057577,299.9978076550384,287.7024584829054,133.05659796829676,0.0,134.8996745953048,127.83643493293731,133.66493131686872,136.72142244431078,148.07302352778007,106.8677064965502,163.45841077158826]" "morpho,60",0.506129799275,"[38.67453681483682,88.28871817633663,84.73389094259322,228.7841915983999,78.4693541506427,74.51130065973761,238.62081383220206,84.92830393468446,205.48866573642988,225.78976695019594,88.95573577667194,84.17012501437259,230.54112705036326,76.22728816230067,228.8534308053093,325.2301133545765,199.91046929538717,522.5115049313821,509.1418693460837,459.06249748581547,277.022585281524,534.2806553873863,476.5669980315472,227.79666898503064,217.05741507842302,444.1141338132043,292.71525381625327,242.140266341181,123.88086466622762,243.09049745401435,329.36626303083824,301.6930056482294,281.1311060006105,326.1860125896211,453.55453061726837,538.4674329857884,240.32482835825087,379.4108259357168,375.6720323365376,274.1700953102454,402.37446179436074,470.5713737390224,444.6953636032597,448.7323854573844,523.2623296942974,221.40222977791905,280.99419285782517,110.24976061061855,122.0535877024353,88.821255667321,170.8305211691875,107.30104877051781,113.21800840812338,130.33200730579932,131.72665624297295,299.9995243960067,130.57982009633457,106.1567451891198,103.3634746091467,120.3740347248083]" "morpho,8",1.171114450093,"[142.5961411541705,148.53656440956593,307.6337800249742,294.38885149411345,301.41997857411707,300.96690932557925,151.1925400501033,140.24379338988612]" "morpho_as_tuple,16",1.296884857368,"[150.0,300.0,315.0,150.0,150.0,300.0,315.0,150.0,150.0,300.0,315.0,150.0,150.0,300.0,315.0,150.0]" 
+"morpho_as_tuple,28",-1232.76129154328,"[299.9999983009865,599.9998671241385,30.000062179765905,2.0628138486245007e-07,299.99992465458445,4.114212060812861e-05,550.8258929855715,2.8518362698880537,215.1238235461269,599.9999999990894,599.99999613828,280.4969476318712,299.99973119389733,8.717222499399213e-09,30.000579346486802,71.54905908676292,0.00571462052255356,4.150774657318834e-07,30.000002858942707,296.3158321243161,298.3965369488135,580.6656020319131,599.9999335282394,0.021603101372761557,210.62072674606668,9.825329584600695e-06,600.0,249.05112220385973]" +"morpho_as_tuple,60",-226602.51406754585,"[2.601154847070575e-10,12.644628027761655,30.000527321358447,299.9987022015465,299.9998355930653,600.0,599.1047997417311,0.10874295041600135,299.99999956335347,0.0,30.0,4.3873740196431754e-09,299.99999998953206,600.0,244.7878675805427,0.00044233242539348794,299.9999999890084,0.3385589011462571,599.9999998789923,300.0,295.13201605339475,10.478052509958957,599.9999999999325,286.78793637037035,299.99995264105496,600.0,30.000047501845643,2.5807875260852597e-05,299.999999999981,0.03211330604881368,599.9999988736897,234.55879510712202,3.126388037344441e-13,28.851700707840394,30.000001791923864,0.00010432615104605247,55.00913041637341,3.1147301911005343e-06,30.0,18.284828979817405,300.0,599.9999999999784,600.0,1.367376967209566e-06,296.7952177016517,599.999995096543,30.000007974249115,0.0005508000268150681,0.13398649347792002,0.9919599987400716,31.8332312829815,299.999996611107,195.75081215751007,599.961354918705,599.9999963588548,299.9970120744066,39.219111301059485,28.087907803798373,599.9999999994982,0.9488701515776654]" diff --git a/nevergrad/functions/photonics/core.py b/nevergrad/functions/photonics/core.py index 6b516c4fd3..e0e53a51d2 100644 --- a/nevergrad/functions/photonics/core.py +++ b/nevergrad/functions/photonics/core.py @@ -75,7 +75,7 @@ def _make_parametrization( ), f"Cannot work with dimension {dimension} for {name}: not divisible by {shape[0]}." b_array = np.array(bounds) assert b_array.shape[0] == shape[0] # pylint: disable=unsubscriptable-object - ones = np.ones((1, shape[1])) + ones = np.ones((1, int(shape[1]))) init = np.sum(b_array, axis=1, keepdims=True).dot(ones) / 2 # type: ignore if as_tuple: instrum = ng.p.Instrumentation( diff --git a/nevergrad/optimization/differentialevolution.py b/nevergrad/optimization/differentialevolution.py index aa0cecf619..5726645a39 100644 --- a/nevergrad/optimization/differentialevolution.py +++ b/nevergrad/optimization/differentialevolution.py @@ -175,6 +175,10 @@ def recommend(self) -> p.Parameter: # This is NOT the naive version. We deal wi def _internal_ask_candidate(self) -> p.Parameter: if len(self.population) < self.llambda: # initialization phase init = self._config.initialization + if self.sampler is None and init == "QO": + self.sampler = oneshot.SamplingSearch( + sampler="Hammersley", scrambled=True, opposition_mode="quasi" + )(self.parametrization, budget=self.llambda) if self.sampler is None and init not in ["gaussian", "parametrization"]: assert init in ["LHS", "QR"] self.sampler = oneshot.SamplingSearch( @@ -306,7 +310,7 @@ class DifferentialEvolution(base.ConfiguredOptimizer): Parameters ---------- - initialization: "parametrization", "LHS" or "QR" + initialization: "parametrization", "LHS" or "QR" or "QO" algorithm/distribution used for the initialization phase. If "parametrization", this uses the sample method of the parametrization. 
scale: float or str @@ -351,7 +355,7 @@ def __init__( ) -> None: super().__init__(_DE, locals(), as_config=True) assert recommendation in ["optimistic", "pessimistic", "noisy", "mean"] - assert initialization in ["gaussian", "LHS", "QR", "parametrization"] + assert initialization in ["gaussian", "LHS", "QO", "QR", "parametrization"] assert isinstance(scale, float) or scale == "mini" if not isinstance(popsize, int): assert popsize in ["large", "dimension", "standard"] @@ -385,6 +389,7 @@ def __init__( LhsDE = DifferentialEvolution(initialization="LHS").set_name("LhsDE", register=True) QrDE = DifferentialEvolution(initialization="QR").set_name("QrDE", register=True) +QODE = DifferentialEvolution(initialization="QO").set_name("QODE", register=True) NoisyDE = DifferentialEvolution(recommendation="noisy").set_name("NoisyDE", register=True) AlmostRotationInvariantDE = DifferentialEvolution(crossover=0.9).set_name( "AlmostRotationInvariantDE", register=True diff --git a/nevergrad/optimization/optimizerlib.py b/nevergrad/optimization/optimizerlib.py index b316f73070..e98a7720cb 100644 --- a/nevergrad/optimization/optimizerlib.py +++ b/nevergrad/optimization/optimizerlib.py @@ -143,6 +143,10 @@ def __init__( "portfolio", "discreteBSO", "lengler", + "lengler2", + "lengler3", + "lenglerhalf", + "lenglerfourth", "doerr", ], f"Unkwnown mutation: '{mutation}'" if mutation == "adaptive": @@ -271,6 +275,38 @@ def _internal_ask_candidate(self) -> p.Parameter: intensity=intensity, arity=self.arity_for_discrete_mutation, ) + elif mutation == "lengler2": + alpha = 3.0 + intensity = int(max(1, self.dimension * (alpha * np.log(self.num_ask) / self.num_ask))) + data = mutator.portfolio_discrete_mutation( + pessimistic_data, + intensity=intensity, + arity=self.arity_for_discrete_mutation, + ) + elif mutation == "lengler3": + alpha = 9.0 + intensity = int(max(1, self.dimension * (alpha * np.log(self.num_ask) / self.num_ask))) + data = mutator.portfolio_discrete_mutation( + pessimistic_data, + intensity=intensity, + arity=self.arity_for_discrete_mutation, + ) + elif mutation == "lenglerfourth": + alpha = 0.4 + intensity = int(max(1, self.dimension * (alpha * np.log(self.num_ask) / self.num_ask))) + data = mutator.portfolio_discrete_mutation( + pessimistic_data, + intensity=intensity, + arity=self.arity_for_discrete_mutation, + ) + elif mutation == "lenglerhalf": + alpha = 0.8 + intensity = int(max(1, self.dimension * (alpha * np.log(self.num_ask) / self.num_ask))) + data = mutator.portfolio_discrete_mutation( + pessimistic_data, + intensity=intensity, + arity=self.arity_for_discrete_mutation, + ) elif mutation == "doerr": # Selection, either random, or greedy, or a mutation rate. assert self._doerr_index == -1, "We should have used this index in tell." @@ -384,6 +420,7 @@ class ParametrizedOnePlusOne(base.ConfiguredOptimizer): - `"portfolio"`: Random number of mutated bits (called niform mixing in Dang & Lehre "Self-adaptation of Mutation Rates in Non-elitist Population", 2016) - `"lengler"`: specific mutation rate chosen as a function of the dimension and iteration index. + - `"lengler{2|3|half|fourth}"`: variant of Lengler crossover: bool whether to add a genetic crossover step every other iteration. 
use_pareto: bool @@ -462,6 +499,18 @@ def __init__( DiscreteLenglerOnePlusOne = ParametrizedOnePlusOne(mutation="lengler").set_name( "DiscreteLenglerOnePlusOne", register=True ) +DiscreteLengler2OnePlusOne = ParametrizedOnePlusOne(mutation="lengler2").set_name( + "DiscreteLengler2OnePlusOne", register=True +) +DiscreteLengler3OnePlusOne = ParametrizedOnePlusOne(mutation="lengler3").set_name( + "DiscreteLengler3OnePlusOne", register=True +) +DiscreteLenglerHalfOnePlusOne = ParametrizedOnePlusOne(mutation="lenglerhalf").set_name( + "DiscreteLenglerHalfOnePlusOne", register=True +) +DiscreteLenglerFourthOnePlusOne = ParametrizedOnePlusOne(mutation="lenglerfourth").set_name( + "DiscreteLenglerFourthOnePlusOne", register=True +) DiscreteLenglerOnePlusOneT = ParametrizedOnePlusOne(tabu_length=10000, mutation="lengler").set_name( "DiscreteLenglerOnePlusOneT", register=True ) @@ -760,6 +809,8 @@ def enable_pickling(self) -> None: OldCMA = ParametrizedCMA().set_name("OldCMA", register=True) +LargeCMA = ParametrizedCMA(scale=3.0).set_name("LargeCMA", register=True) +TinyCMA = ParametrizedCMA(scale=0.33).set_name("TinyCMA", register=True) CMA = ParametrizedCMA().set_name("CMA", register=True) CMAbounded = ParametrizedCMA( scale=1.5884, popsize_factor=1, elitist=True, diagonal=True, fcmaes=False @@ -1352,6 +1403,16 @@ def __init__( RescaledCMA = Rescaled().set_name("RescaledCMA", register=True) +TinyLhsDE = Rescaled(base_optimizer=LhsDE, scale=1e-3).set_name("TinyLhsDE", register=True) +TinyQODE = Rescaled(base_optimizer=QODE, scale=1e-3).set_name("TinyQODE", register=True) +TinySQP = Rescaled(base_optimizer=SQP, scale=1e-3).set_name("TinySQP", register=True) +MicroSQP = Rescaled(base_optimizer=SQP, scale=1e-6).set_name("MicroSQP", register=True) +TinySQP.no_parallelization = True +MicroSQP.no_parallelization = True +TinySPSA = Rescaled(base_optimizer=SPSA, scale=1e-3).set_name("TinySPSA", register=True) +MicroSPSA = Rescaled(base_optimizer=SPSA, scale=1e-6).set_name("MicroSPSA", register=True) +TinySPSA.no_parallelization = True +MicroSPSA.no_parallelization = True class SplitOptimizer(base.Optimizer): @@ -1703,6 +1764,8 @@ def enable_pickling(self) -> None: optimizers=[ParametrizedCMA(random_init=True, scale=scale) for scale in [1.0, 1e-3, 1e-6]], warmup_ratio=0.33, ).set_name("MultiScaleCMA", register=True) +LPCMA = ParametrizedCMA(popsize_factor=10.0).set_name("LPCMA", register=True) +VLPCMA = ParametrizedCMA(popsize_factor=100.0).set_name("VLPCMA", register=True) class _MetaModel(base.Optimizer): @@ -1783,6 +1846,16 @@ def __init__( MetaModelOnePlusOne = ParametrizedMetaModel(multivariate_optimizer=OnePlusOne).set_name( "MetaModelOnePlusOne", register=True ) +RFMetaModelOnePlusOne = ParametrizedMetaModel(multivariate_optimizer=OnePlusOne, algorithm="rf").set_name( + "RFMetaModelOnePlusOne", register=True +) +MetaModelPSO = ParametrizedMetaModel(multivariate_optimizer=PSO).set_name("MetaModelPSO", register=True) +RFMetaModelPSO = ParametrizedMetaModel(multivariate_optimizer=PSO, algorithm="rf").set_name( + "RFMetaModelPSO", register=True +) +SVMMetaModelPSO = ParametrizedMetaModel(multivariate_optimizer=PSO, algorithm="svr").set_name( + "SVMMetaModelPSO", register=True +) MetaModelDE = ParametrizedMetaModel(multivariate_optimizer=DE).set_name("MetaModelDE", register=True) NeuralMetaModelDE = ParametrizedMetaModel(algorithm="neural", multivariate_optimizer=DE).set_name( @@ -2283,6 +2356,7 @@ def __init__( "dimension": self.dimension, "half": self.budget // 2 if self.budget else self.num_workers, 
"third": self.budget // 3 if self.budget else self.num_workers, + "fourth": self.budget // 4 if self.budget else self.num_workers, "tenth": self.budget // 10 if self.budget else self.num_workers, "sqrt": int(np.sqrt(self.budget)) if self.budget else self.num_workers, } @@ -2290,7 +2364,8 @@ def __init__( last_budget = None if self.budget is None else max(4, self.budget - sum(self.budgets)) assert len(optimizers) == len(self.budgets) + 1 assert all( - x in ("third", "half", "tenth", "dimension", "num_workers", "sqrt") or x > 0 for x in self.budgets + x in ("fourth", "third", "half", "tenth", "dimension", "num_workers", "sqrt") or x > 0 + for x in self.budgets ), str(self.budgets) for opt, optbudget in zip(optimizers, self.budgets + [last_budget]): # type: ignore self.optimizers.append(opt(self.parametrization, budget=optbudget, num_workers=self.num_workers)) @@ -2353,6 +2428,12 @@ def __init__( GeneticDE = Chaining([RotatedTwoPointsDE, TwoPointsDE], [200]).set_name( "GeneticDE", register=True ) # Also known as CGDE +MemeticDE = Chaining([RotatedTwoPointsDE, TwoPointsDE, DE, SQP], ["fourth", "fourth", "fourth"]).set_name( + "MemeticDE", register=True +) +QNDE = Chaining([QODE, BFGS], ["half"]).set_name("QNDE", register=True) +QNDE.no_parallelization = True +MemeticDE.no_parallelization = True discretememetic = Chaining( [RandomSearch, DiscreteLenglerOnePlusOne, DiscreteOnePlusOne], ["third", "third"] ).set_name("discretememetic", register=True) diff --git a/nevergrad/optimization/recastlib.py b/nevergrad/optimization/recastlib.py index ef7f1850ca..fd6721b356 100644 --- a/nevergrad/optimization/recastlib.py +++ b/nevergrad/optimization/recastlib.py @@ -43,6 +43,7 @@ def __init__( "Powell", ] or "NLOPT" in method + or "BFGS" in method ), f"Unknown method '{method}'" self.method = method self.random_restart = random_restart @@ -212,6 +213,8 @@ def __init__(self, *, method: str = "Nelder-Mead", random_restart: bool = False) NLOPT = NonObjectOptimizer(method="NLOPT").set_name("NLOPT", register=True) Powell = NonObjectOptimizer(method="Powell").set_name("Powell", register=True) RPowell = NonObjectOptimizer(method="Powell", random_restart=True).set_name("RPowell", register=True) +BFGS = NonObjectOptimizer(method="BFGS", random_restart=True).set_name("BFGS", register=True) +LBFGSB = NonObjectOptimizer(method="L-BFGS-B", random_restart=True).set_name("LBFGSB", register=True) Cobyla = NonObjectOptimizer(method="COBYLA").set_name("Cobyla", register=True) RCobyla = NonObjectOptimizer(method="COBYLA", random_restart=True).set_name("RCobyla", register=True) SQP = NonObjectOptimizer(method="SLSQP").set_name("SQP", register=True) diff --git a/nevergrad/optimization/recorded_recommendations.csv b/nevergrad/optimization/recorded_recommendations.csv index 9edeb7d0cd..e2730f402c 100644 --- a/nevergrad/optimization/recorded_recommendations.csv +++ b/nevergrad/optimization/recorded_recommendations.csv @@ -10,6 +10,7 @@ AvgHammersleySearch,0.2104283942,-1.1503493804,-0.1397102989,0.8416212336,,,,,,, AvgHammersleySearchPlusMiddlePoint,0.5244005127,-1.1503493804,-0.1397102989,0.8416212336,,,,,,,,,,,, AvgMetaRecenteringNoHull,0.6962783408,-0.1604212797,-0.6145401633,0.8813954947,,,,,,,,,,,, AvgRandomSearch,1.7163318206,-1.0929434198,-0.5715059829,0.2691248199,,,,,,,,,,,, +BFGS,0.4777665424,-0.702409937,0.199534994,-0.691876864,,,,,,,,,,,, BPRotationInvariantDE,-0.5273201105,-0.156805946,-1.2551455501,3.327306828,2.3993996197,1.2830831401,0.8566151549,-1.6450076557,,,,,,,, 
CM,1.0082049151,-0.9099785499,-1.025147209,1.2046460074,,,,,,,,,,,, CMA,1.012515477,-0.9138805701,-1.029555946,1.2098418178,,,,,,,,,,,, @@ -117,8 +118,11 @@ HullCenterHullAvgScrHammersleySearchPlusMiddlePoint,-1.2815515655,0.0,0.43072729 HyperOpt,0.415676949,-0.9950339707,1.9010341707,-0.5270914077,,,,,,,,,,,, IsoEMNA,1.012515477,-0.9138691467,-1.0295302074,1.2097964496,,,,,,,,,,,, IsoEMNATBPSA,0.0,0.0,0.0,0.0,,,,,,,,,,,, +LBFGSB,0.4777191859,-0.7022021417,0.1999598225,-0.7018655626,,,,,,,,,,,, LHSSearch,-0.3978418928,0.827925915,1.2070034191,1.3637174061,,,,,,,,,,,, +LPCMA,1.012515477,-0.9138805701,-1.029555946,1.2098418178,,,,,,,,,,,, LSCMA,1.012515477,-0.9138805701,-1.029555946,1.2098418178,,,,,,,,,,,, +LargeCMA,1.5056169113,0.4165437382,1.9223313977,1.5820573214,,,,,,,,,,,, LargeHaltonSearch,-67.4489750196,43.0727299295,-25.3347103136,-56.5948821933,,,,,,,,,,,, LhsDE,-0.8072358182,0.6354687554,1.575403308,1.1808277036,2.5888168575,-0.1627990771,-3.656466139,-1.040475202,,,,,,,, LhsHSDE,-0.8072358182,0.6354687554,1.575403308,1.1808277036,2.5888168575,-0.1627990771,-3.656466139,-1.040475202,,,,,,,, @@ -132,6 +136,7 @@ MetaNGOpt10,0.0051270781,-0.1202306759,-1.0295302074,1.2098266949,,,,,,,,,,,, MetaRecentering,0.6962783408,-0.1604212797,-0.6145401633,0.8813954947,,,,,,,,,,,, MetaTuneRecentering,0.925614596,-0.2132599411,-0.8169539561,1.1717046001,,,,,,,,,,,, MicroCMA,1.0125e-06,-9.139e-07,-1.0296e-06,1.2098e-06,,,,,,,,,,,, +MicroSQP,0.0,-0.0,0.0,0.0,,,,,,,,,,,, MilliCMA,0.0010125155,-0.0009138806,-0.0010295559,0.0012098418,,,,,,,,,,,, MiniDE,0.8273276988,-1.2921051963,-0.4797521288,0.2138608624,0.7088815721,0.7346249014,-2.6392592028,-1.0729615222,,,,,,,, MiniLhsDE,-0.0313128807,0.2738703026,-0.1988242191,0.9942001938,0.7167500893,-0.0350394443,-1.5341684983,-0.3039246928,,,,,,,, @@ -191,6 +196,8 @@ PortfolioNoisyDiscreteOnePlusOne,0.0,0.2169245995,-0.4007924638,1.4805504707,,,, PortfolioOptimisticNoisyDiscreteOnePlusOne,0.0,0.2169245995,-0.4007924638,1.4805504707,,,,,,,,,,,, Powell,0.0,0.0,0.0,0.0,,,,,,,,,,,, PymooNSGA2,-0.1682383832,0.2282461031,-0.5661945935,1.2331772922,,,,,,,,,,,, +QNDE,-1.8339146358,0.0,0.4307272993,0.8416212336,,,,,,,,,,,, +QODE,-0.0940516694,-1.0696123225,1.6018350418,2.3799747983,2.0125506768,-0.5298097519,-0.2954380981,-2.684839746,,,,,,,, QORandomSearch,0.0051270781,-0.1202276702,-0.8069818786,2.871819395,,,,,,,,,,,, QOScrHammersleySearch,-0.9674215661,0.0,0.4307272993,0.8416212336,,,,,,,,,,,, QrDE,-1.2126139659,-0.1022395502,0.5946113194,0.5365857957,3.38269024,1.3718912761,-3.2278357447,-1.2566858857,,,,,,,, @@ -238,8 +245,13 @@ TEAvgLHSSearch,-0.3978418928,0.827925915,1.2070034191,1.3637174061,,,,,,,,,,,, TEAvgRandomSearch,1.7163318206,-1.0929434198,-0.5715059829,0.2691248199,,,,,,,,,,,, TEAvgScrHammersleySearch,1.3829941271,-0.318639364,-1.2206403488,1.7506860713,,,,,,,,,,,, TEAvgScrHammersleySearchPlusMiddlePoint,-1.2815515655,0.0,0.4307272993,0.8416212336,,,,,,,,,,,, +TinyCMA,0.3341301074,-0.3015805881,-0.3397534622,0.3992477999,,,,,,,,,,,, +TinyQODE,-0.0018339146,0.0,0.0004307273,0.0008416212,,,,,,,,,,,, +TinySPSA,9.32387e-05,-9.32386e-05,9.32386e-05,9.32387e-05,,,,,,,,,,,, +TinySQP,9.999e-07,-1.6e-06,0.0,7.9999e-06,,,,,,,,,,,, TripleCMA,-1.3347990875,-1.2991318815,-0.0795064677,1.563956499,,,,,,,,,,,, TwoPointsDE,1.1400386808,0.3380024444,0.4755144618,2.6390460807,0.6911075733,1.111235567,-0.2576843178,-1.1959512855,,,,,,,, +VLPCMA,1.012515477,-0.9138805701,-1.029555946,1.2098418178,,,,,,,,,,,, Zero,0.0,0.0,0.0,0.0,,,,,,,,,,,, 
cGA,0.0509603282,0.1315286387,-0.0393104602,0.7333300801,,,,,,,,,,,, discretememetic,0.0,0.0,0.0,1.095956118,,,,,,,,,,,, diff --git a/nevergrad/optimization/test_optimizerlib.py b/nevergrad/optimization/test_optimizerlib.py index 4d44647459..226c2266e9 100644 --- a/nevergrad/optimization/test_optimizerlib.py +++ b/nevergrad/optimization/test_optimizerlib.py @@ -212,14 +212,20 @@ def test_infnan(name: str) -> None: @pytest.mark.parametrize("name", registry) # type: ignore def test_optimizers(name: str) -> None: """Checks that each optimizer is able to converge on a simple test case""" - if sum([ord(c) for c in name]) % 4 > 0 and name not in [ - "DE", - "CMA", - "OnePlusOne", - "Cobyla", - "DiscreteLenglerOnePlusOne", - "PSO", - ]: + if ( + sum([ord(c) for c in name]) % 4 > 0 + and name + not in [ + "DE", + "CMA", + "OnePlusOne", + "Cobyla", + "DiscreteLenglerOnePlusOne", + "PSO", + ] + or "Tiny" in name + or "Micro" in name + ): raise SkipTest("Too expensive: we randomly skip 3/4 of these tests.") if name in ["CMAbounded", "NEWUOA"]: # Not a general purpose optimization method. return diff --git a/nevergrad/optimization/test_suggest.py b/nevergrad/optimization/test_suggest.py index b1a5f021c6..d7a02b5f22 100644 --- a/nevergrad/optimization/test_suggest.py +++ b/nevergrad/optimization/test_suggest.py @@ -86,7 +86,7 @@ def good_at_suggest(name: str) -> bool: @skip_win_perf # type: ignore -@pytest.mark.parametrize("name", [r for r in registry if "iscre" in r and good_at_suggest(r)]) # type: ignore +@pytest.mark.parametrize("name", [r for r in registry if "iscre" in r and good_at_suggest(r) and ("Lengler" not in r or "LenglerOne" in r)]) # type: ignore def test_harder_suggest_optimizers(name: str) -> None: """Checks that discrete optimizers are good when a suggestion is nearby.""" instrum = ng.p.Array(shape=(100,)).set_bounds(0.0, 1.0) diff --git a/scripts/dagstuhloid.sh b/scripts/dagstuhloid.sh new file mode 100755 index 0000000000..1b67791885 --- /dev/null +++ b/scripts/dagstuhloid.sh @@ -0,0 +1,25 @@ +#!/bin/bash +#SBATCH --job-name=dagstuhloid +#SBATCH --output=dagstuhloid_%A_%a.out +#SBATCH --error=dagstuhloid_%A_%a.err +#SBATCH --time=72:00:00 +#SBATCH --partition=scavenge +#SBATCH --nodes=1 +#SBATCH --cpus-per-task=67 +#SBATCH -a 0-61 + + +tasks=(aquacrop_fao bonnans double_o_seven fishing keras_tuning mldakmeans mltuning mono_rocket multiobjective_example multiobjective_example_hd multiobjective_example_many_hd naive_seq_keras_tuning naive_seq_mltuning naive_veryseq_keras_tuning naivemltuning nano_naive_seq_mltuning nano_naive_veryseq_mltuning nano_seq_mltuning nano_veryseq_mltuning oneshot_mltuning pbbob pbo_reduced_suite reduced_yahdlbbbob seq_keras_tuning seq_mltuning sequential_topology_optimization spsa_benchmark topology_optimization ultrasmall_photonics ultrasmall_photonics2 veryseq_keras_tuning yabbob yabigbbob yaboundedbbob yaboxbbob yahdbbob yamegapenbbob yamegapenboundedbbob yamegapenboxbbob yanoisybbob yaonepenbbob yaonepenboundedbbob yaonepenboxbbob yaonepennoisybbob yaonepenparabbob yaonepensmallbbob yaparabbob yapenbbob yapenboundedbbob yapenboxbbob yapennoisybbob yapenparabbob yapensmallbbob yasmallbbob yatinybbob yatuningbbob deceptive multimodal hdmultimodal sequential_instrum_discrete instrum_discrete) +tasks=(aquacrop_fao bonnans deceptive double_o_seven fishing instrum_discrete keras_tuning mldakmeans mltuning mono_rocket multimodal multiobjective_example multiobjective_example_hd multiobjective_example_many_hd naive_seq_keras_tuning naive_seq_mltuning 
naive_veryseq_keras_tuning naivemltuning nano_naive_seq_mltuning nano_naive_veryseq_mltuning nano_seq_mltuning nano_veryseq_mltuning neuro_oneshot_mltuning pbbob pbo_reduced_suite reduced_yahdlbbbob rocket seq_keras_tuning seq_mltuning sequential_instrum_discrete sequential_topology_optimization spsa_benchmark topology_optimization ultrasmall_photonics ultrasmall_photonics2 veryseq_keras_tuning yabbob yabigbbob yaboundedbbob yaboxbbob yahdbbob yahdnoisybbob yamegapenbbob yamegapenboundedbbob yamegapenboxbbob yanoisybbob yaonepenbbob yaonepenboundedbbob yaonepenboxbbob yaonepennoisybbob yaonepenparabbob yaonepensmallbbob yaparabbob yapenbbob yapenboundedbbob yapenboxbbob yapennoisybbob yapenparabbob yapensmallbbob yasmallbbob yatinybbob yatuningbbob) + +task=${tasks[SLURM_ARRAY_TASK_ID]} + +echo task attribution $SLURM_ARRAY_TASK_ID $task +echo Starting at +date +# num_workers is the number of processes. Maybe use a bit more than the number of cores at the line "cpus-per-task" +# above. +python -m nevergrad.benchmark $task --num_workers=71 +echo task over $SLURM_ARRAY_TASK_ID $task +echo Finishing at +date diff --git a/scripts/latexize.sh b/scripts/latexize.sh new file mode 100755 index 0000000000..67339c5e0b --- /dev/null +++ b/scripts/latexize.sh @@ -0,0 +1,70 @@ +#!/bin/bash + +allplots="" + +# artificial noise-free single objective unconstrained or box-constrained +allplots="$allplots `ls -d *_plots/ | grep -v 'noisy|spsa' | grep bbob | grep yabbob `" +allplots="$allplots `ls -d *_plots/ | egrep 'multimodal|deceptive'`" +allplots="$allplots `ls -d *_plots/ | grep -v 'noisy|spsa' | grep bbob | grep -v yabbob | grep -v pen`" + +# penalized +allplots="$allplots `ls -d *_plots/ | grep -v 'noisy|spsa' | grep bbob | grep pen`" + +# tuning +allplots="$allplots `ls -d *_plots/ | grep -v 'noisy|spsa' | grep -v bbob | egrep -v 'multimodal|deceptive' | grep -v photonics | grep -v topology | grep -v rock | grep tuning `" + +# discrete +allplots="$allplots `ls -d *_plots/ | grep -v 'noisy|spsa' | grep -v bbob | egrep -v 'multimodal|deceptive' | grep -v photonics | grep -v topology | grep -v rock | grep -v tuning | egrep 'pbo|discr|bonn'`" + +# rest of RW, besides photonics, topology, rockets +allplots="$allplots `ls -d *_plots/ | grep -v 'noisy|spsa' | grep -v bbob | egrep -v 'multimodal|deceptive' | grep -v photonics | grep -v topology | grep -v rock | grep -v tuning | egrep -v 'pbo|discr|bonn' | grep -v multiobj | grep -v spsa ` " +# RW +allplots="$allplots `ls -d *_plots/ | grep -v 'noisy|spsa' | grep -v bbob | grep photonics `" +allplots="$allplots `ls -d *_plots/ | grep -v 'noisy|spsa' | grep -v bbob | grep topology`" +allplots="$allplots `ls -d *_plots/ | grep -v 'noisy|spsa' | grep -v bbob | grep rocket `" + +# multiobj +allplots="$allplots `ls -d *_plots/ | grep -v 'noisy|spsa' | grep -v bbob | grep multiobj`" + +# Noisy optimization +allplots="$allplots `ls -d *_plots/ | egrep -i 'noisy|spsa'`" + +echo $allplots + +( +cat scripts/tex/beginning.tex +for u in $allplots +do +echo "\\subsubsection{`echo $u | sed 's/_plots.$//g'`}" | sed 's/_/ /g'| sed 's/aquacrop/(RW) &/g' | sed 's/rocket/(RW)&/g' | sed 's/fishing/(RW)&/g' | sed 's/MLDA/(RW)&/g' | sed 's/keras/(RW)&/g' | sed 's/mltuning/(RW)&/g' | sed 's/powersystems/(RW)&/g' | sed 's/mixsimulator/(RW)&/g' | sed 's/olympus/(RW)&/g' | sed 's/double.o.seven/(RW)&/g' +cat scripts/txt/`echo $u | sed 's/_plots/.txt/g'` +echo '\begin{enumerate}' ; cat $u/fig*.txt | grep -v pngranking | sed 's/[_=]/ /g' | sed 's/ algo.[0-9]*:/\\item/g' ; echo '\item[] 
~\ ~' ; echo '\end{enumerate}'
+ls ${u}/*all_pure.png ${u}/xpresults_all.png | sed 's/.*/\\includegraphics[width=.8\\textwidth]{{&}}\\\\/g'
+done
+echo '\section{Conclusion}'
+cat scripts/tex/conclusion.tex
+echo '\appendix'
+echo '\section{Competence maps}'
+for u in $allplots
+do
+echo "\\subsection{`echo $u | sed 's/_plots.$//g'`}" | sed 's/_/ /g'| sed 's/aquacrop/(RW) &/g' | sed 's/rocket/(RW)&/g' | sed 's/fishing/(RW)&/g' | sed 's/MLDA/(RW)&/g' | sed 's/keras/(RW)&/g' | sed 's/mltuning/(RW)&/g' | sed 's/powersystems/(RW)&/g' | sed 's/mixsimulator/(RW)&/g' | sed 's/olympus/(RW)&/g' | sed 's/double.o.seven/(RW)&/g'
+
+for v in `grep -c none ${u}/comp*.tex | grep ':0' | sed 's/:.*//g'`
+do
+echo "\\subsubsection*{$v}" | sed 's/[_=]/ /g' | sed 's/\.tex//g'
+ls `ls $v | sed 's/\.tex/\.pdf/g'` | sed 's/.*/\\includegraphics[width=.8\\textwidth]{{&}}\\\\/g'
+done
+done
+cat scripts/tex/end.tex ) > dagstuhloid.tex
+sed -i 's/\\subsubsection{yabbob}/\\subsection{Artificial noise-free single objective}&/g' dagstuhloid.tex
+sed -i 's/\\subsubsection{yamegapenbbob}/\\subsection{Constrained BBOB variants}&/g' dagstuhloid.tex
+sed -i 's/\\subsubsection{(RW)keras tuning}/\\subsection{Real world machine learning tuning}&/g' dagstuhloid.tex
+sed -i 's/\\subsubsection{bonnans}/\\subsection{Discrete optimization}&/g' dagstuhloid.tex
+sed -i 's/\\subsubsection{(RW) aquacrop fao}/\\subsection{Real world, other than machine learning}&/g' dagstuhloid.tex
+sed -i 's/.*control.*//g' dagstuhloid.tex
+sed -i 's/\\subsubsection{multiobjective example hd}/\\subsection{Multiobjective problems}&/g' dagstuhloid.tex
+sed -i 's/\\subsubsection{spsa benchmark}/\\subsection{Noisy optimization}&/g' dagstuhloid.tex
+cp scripts/tex/biblio.bib .
+pdflatex dagstuhloid.tex
+bibtex dagstuhloid.aux
+pdflatex dagstuhloid.tex
+pdflatex dagstuhloid.tex
diff --git a/scripts/plot_dagstuhloid.sh b/scripts/plot_dagstuhloid.sh
new file mode 100755
index 0000000000..2079b065ab
--- /dev/null
+++ b/scripts/plot_dagstuhloid.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+# Do nothing if there is no CSV.
+if compgen -G "*.csv" > /dev/null; then
+
+# First we run all nevergrad plotting.
+for i in *.csv
+do
+    python -m nevergrad.benchmark.plotting --max_combsize=2 --competencemaps=1 $i &
+done
+wait
+
+
+# Second we do pdflatex
+for i in *.csv
+do
+    pushd `echo $i | sed 's/\.csv/_plots/g'`
+    if compgen -G "comp*.tex" > /dev/null; then
+    for t in comp*.tex
+    do
+        pdflatex $t &
+    done
+    fi # end of "there are competence map files"
+    popd
+done
+wait
+
+fi # End of "there is something to do".
+
+# tar -zcvf ~/dag.tgz *_plots
+scripts/latexize.sh
+
+tar -zcvf dagstuhloid.tgz dagstuhloid.pdf *.csv
diff --git a/scripts/tex/beginning.tex b/scripts/tex/beginning.tex
new file mode 100644
index 0000000000..485a3f9d28
--- /dev/null
+++ b/scripts/tex/beginning.tex
@@ -0,0 +1,106 @@
+\documentclass{article}
+
+
+\usepackage{graphicx}
+\usepackage{url}
+\usepackage{amsmath}
+\usepackage{amssymb}
+
+\def\E{\mathbb{E}}
+\begin{document}
+
+\title{Dagstuhloid benchmarking}
+%\author{OT, Photonics guys, Keras/mltuning guys, D. people, INFRA people}
+
+\maketitle
+
+
+\begin{abstract}
+At the Dagstuhl seminar 23251 (June 2023), many principles of benchmarking were discussed: reproducibility; open sourcing; cases with budget $>$ 100 times the dimension being the exception rather than the rule; the existence of cases with 10 $\times$ budget $<$ dimension; and the importance of real-world benchmarking for validating artificial benchmarks.
Also, bugs in existing implementations have been discussed.
+
+We want everything to be easy to rerun entirely from scratch. This might require significant computational power.
+However, it is possible to run some benchmarks manually and separately: one can easily edit the main script and reduce the number of benchmarks, possibly to a single benchmark at a time; also, we periodically rerun all the benchmarks and accept pull requests, so that limited computational power should not be an obstacle to reproducibility.
+\end{abstract}
+\tableofcontents
+
+\section{Introduction}
+
+In artificial benchmarking platforms, some talks pointed out the importance of the distribution of optima. Therefore, we propose parameters of the distribution of optima, in the PBBOB (parametric BBOB) benchmark.
+We compare implementations, not abstract algorithms. The detailed implementations are freely available in Nevergrad and anyone can propose a modification.
+A large part of the benchmarks is based on real-world data or reasonably close to it.
+Benchmarking is a serious matter: recent news about machine learning papers on black-box optimization shows that poorly reproducible benchmarking is detrimental to science.
+A discussion of poor reproducibility in deep-learning-assisted optimization is available in \cite{rlgoogle}; see also \cite{ecnassurvey,repronas,pham2018efficient,real2019regularized}, which show how some simple methods might, in spite of promising claims, outperform heavy GPU-based black-box optimization methods. Also, \cite{leakage} mentions various examples of results that are difficult to reproduce, in general, in the machine learning community.
+
+Naming: “oid” means “similar to”. The benchmark is called Dagstuhloid because it is inspired by Dagstuhl talks. The responsibility is entirely ours, though.
+
+\subsection{URLs: finding this on WWW}
+The main place for discussing the Dagstuhloid benchmark is \url{https://www.facebook.com/groups/nevergradusers/}.
+
+The code is available at \url{https://github.com/facebookresearch/nevergrad}.
+
+\subsection{Reproducing results}
+How to reproduce these results:
+\begin{itemize}
+\item Install Nevergrad by “git clone” (see details at the URL above).
+\item Running:
+\begin{itemize}
+\item Without a cluster: "python -m nevergrad.benchmark yabbob --num\_workers=67" if you want to run YABBOB on 67 cores.
+\item With a cluster: run “sbatch scripts/dagstuhloid.sh” for launching experiments with Slurm. With other cluster management tools, you might have to adapt the script. It is written assuming Slurm: it should be feasible to adapt it to other schedulers. This script is randomized: you might have to run it several times to get enough results.
+\end{itemize}
+\item For plotting results, run “scripts/plot\_dagstuhloid.sh”. Of course, some data might be missing if not enough runs are complete. Note that if LaTeX is unavailable or incompatible, all figures are nonetheless available.
+\end{itemize}
+
+\subsection{What if your computational power is not sufficient?}
+Please note that creating a pull request and pinging us at \url{https://www.facebook.com/groups/nevergradusers/} is a simple solution to get heavy computations done: we are more than happy to run what you need if the computational cost is reasonable.
+
+\section{Comparison with other benchmarks}
+We need correctly tuned algorithms. We cannot guarantee that all algorithms are correctly parametrized: however, everything is free and anyone can implement her own favorite algorithm and/or propose a new tuning.
For example, compared to \cite{lamcts}, our artificial benchmarks do not place the optimum at zero (which would give a strong advantage to algorithms initialized 100 times closer to 0 than others), and in our real-world benchmarks the different methods do not use completely different scalings, unless a method deliberately scales its initialization as a function of dimension/budget: of course, such cases deserve specific discussions.
+For each benchmark, the detailed setup is documented at \url{https://github.com/facebookresearch/nevergrad/blob/main/nevergrad/benchmark/experiments.py}.
+Ctrl-F with the name of the benchmark should provide all details.
+The algorithms are all readable in \url{https://github.com/facebookresearch/nevergrad/blob/main/nevergrad/optimization/}.
+For each benchmark we provide both:
+\begin{itemize}
+\item A heatmap, showing the frequency at which a method (row) outperforms another method (column) on average. Methods are ordered by their average such frequency over all other methods.
+The column labels show the methods together with the number of settings they were able to tackle (for example, some methods have no parallel version and therefore do not fill all settings).
+\item A convergence curve, with the budget on the x-axis and the average (over all budgets) normalized (linearly, to 0-1) loss. Note that some benchmarks do not have the same functions for the different values of the budget. Therefore we might get a rugged curve, not monotonic at all. This is even more the case for wizards such as NGOpt or NGOptRW, which make decisions based on the budget: they might make a bad choice for some values of the budget, leading to irregular curves.
+\end{itemize}
+Note that the ranking of the rows or columns in the heatmap, and the ranking of the curves, do not have to match. As detailed above, runs for different budgets are independent, and we take all budget values into the statistics used for the heatmaps: therefore the ranking in the heatmap takes into account the low budgets as much as the high budgets of the experiments. Also, the heatmap is about the frequency at which a method outperforms another method, so the size of the gap does not matter. Robustness is therefore taken into account differently: the impact of this difference can be big.
+The heatmap presented here is the so-called ``pure'' one, i.e. only algorithms which tackled all problems are presented.
+%On the other hand, the convergence curves contain all methods TODO: just keep in minds that those not in the heatmap might be averaged on a smaller set of functions, hence the comparison might be unfair.
+
+The appendix contains many competence maps. Competence maps show, for a given pair of variables/values, which algorithm performs best on average. This is quite good for interpretability, but voluminous.
+
+Whereas most platforms do runs for a single budget, and then plot curves up to that budget, we run the algorithms separately for, e.g., budgets 100, 200, 400 and 800. This implies that curves are less smooth. The reason for this is that smooth curves obtained by truncation can give a false sense of smoothness and invalidate tests if users assume independence between results obtained for different budgets.
+
+For noisy optimization, we assume unbiased noise in the artificial benchmarks. However, the real-world ones are biased in the same sense as hyperparameter tuning: overfitting and underfitting can happen.
+We differentiate ask, tell and recommend: this is critical.
Some platforms do a simple ask and tell only and assume that algorithms can, for free, guess which of their visited points is best. This is incorrect and misleading, as pointed out for the noisy BBOB benchmark long ago~\cite{bbobissue1,bbobissue2,bbobissue3,bbobissue4}: instead of being based on a recommendation, the reported result is based on the minimum {\em expected} fitness over all visited points, as if it were possible to know (at zero cost) which of the visited points is the best.
+More formally, noisy optimization algorithms typically have iterations defined by $(x_{n+1},\hat x_{n+1})=\mathrm{Algorithm}((x_1,\dots,x_n),(y_1,\dots,y_n))$. The $x_n$ are the iterates at which the noisy objective function is evaluated, the $y_n$ are the noisy loss values, and the $\hat x_n$ are the recommendations, i.e. approximations of the optimum as provided by the algorithm. In the ask/tell format, $x_n$ is provided by ``ask'', the algorithm is informed of $(x_n,y_n)$ by ``tell'', and we need a method ``recommend'' for providing the recommendations. Ask and recommend are distinct because $x_n$ and $\hat x_n$ are distinct. The regret is evaluated at $\hat x_n$, and it is known~\cite{fabian,decocknoise} that, with a significant noise level, fast rates~\cite{fabian,chen1988} for the simple regret can only be obtained by using $x_n$ far from the optimum (i.e. $x_n\neq \hat x_n$) for acquiring knowledge. Plotting results using $\E\inf_{i\leq n}(\E f)(x_i)$ (or any other criterion based on the $x_i$ rather than the $\hat x_n$) instead of $\E f(\hat x_n)$ is convenient for reusing noise-free software, but wrong: the best algorithms for such criteria are those which randomly explore around $\hat x_n$ rather than those which do clever explorations further from the optimum. We underline that, in spite of this bug in the noisy case, BBOB has been extremely useful for making benchmarking more rigorous in BBO.
+
+Also, in terms of noise, we do not plot the best result so far but the value of the current recommendation. An approach based on the best noisy value so far implies biased results.
+
+``(RW)'' means that the benchmark is real world. Note that the definition of ``real world'' is not so simple: we are entirely in silico, and in some cases the model has been simplified. This just means that we consider the benchmark sufficiently real-world to be tagged that way.
+
+As pointed out during the seminar, ordered discrete variables are different from unordered discrete variables. Some of the benchmarks include ordered discrete variables and some include unordered ones: Nevergrad can use typed variables (as documented in \url{https://facebookresearch.github.io/nevergrad/optimization.html}) and we use this.
+
+The vast family of benchmarks in Nevergrad is used for tuning an algorithm selector, termed NGOpt. A variant termed NGOptRW is adapted to real-world contexts. However, we do not recommend running it blindly: in spite of the effort put into designing these algorithms, there are many cases in which another algorithm will perform vastly better.
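+
+As a minimal illustration of this ask/tell/recommend distinction (an illustrative sketch, not one of the benchmark scripts; the noisy objective below is a toy function), the Nevergrad API can be used as follows:
+\begin{verbatim}
+import numpy as np
+import nevergrad as ng
+
+def noisy_loss(x: np.ndarray) -> float:
+    # Toy noisy objective: sphere function plus Gaussian noise.
+    return float(np.sum(x ** 2) + np.random.normal(0.0, 1.0))
+
+optimizer = ng.optimizers.NGOpt(parametrization=2, budget=200)
+for _ in range(optimizer.budget):
+    candidate = optimizer.ask()        # x_n: point to evaluate
+    optimizer.tell(candidate, noisy_loss(*candidate.args))
+recommendation = optimizer.recommend() # hat x_n: approximation of the optimum
+print(recommendation.value)
+\end{verbatim}
+The regret should be measured at the recommendation, not at the best noisy value observed among the asked points.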
+ +\section{Experimental results} + + + diff --git a/scripts/tex/biblio.bib b/scripts/tex/biblio.bib new file mode 100644 index 0000000000..8d3965f12d --- /dev/null +++ b/scripts/tex/biblio.bib @@ -0,0 +1,236 @@ +@misc{bbobissue1, +author={Remi Coulom}, +howpublished={\url{http://lists.lri.fr/pipermail/bbob-discuss/2012-April/000257.html}}, +year={2012}} +@misc{bbobissue2, +author={Hans-Georg Beyer}, +howpublished={\url{http://lists.lri.fr/pipermail/bbob-discuss/2012-April/000270.html}}, +year={2012}} +@misc{bbobissue3, +author={Hans-Georg Beyer}, +howpublished={\url{http://lists.lri.fr/pipermail/bbob-discuss/2012-April/000258.html}}, +year={2012} +} +@misc{bbobissue4, +author={Remi Coulom}, +howpublished={\url{http://lists.lri.fr/pipermail/bbob-discuss/2012-April/000252.html}}, +year={2012} +} + +@inproceedings{decocknoise, + author = {Decock, J{\'e}r{\'e}mie and Teytaud, Olivier}, + title = {Noisy Optimization Complexity Under Locality Assumption}, + booktitle = {Proceedings of the Twelfth Workshop on Foundations of Genetic Algorithms XII}, + series = {FOGA XII '13}, + year = {2013}, + unusedisbn = {978-1-4503-1990-4}, + unusedunusedlocation = {Adelaide, Australia}, + pages = {183--190}, + numpages = {8}, + unusedurl = {http://doi.acm.org/10.1145/2460239.2460256}, + unusedunuseddoi = {10.1145/2460239.2460256}, + acmid = {2460256}, + publisher = {ACM}, + unusedunusedaddress = {New York, NY, USA}, + unusedkeywords = {black box complexity model, local sampling, noisy optimization}, +} +@article{fabian, +author = "Fabian, Vaclav", +unusedunuseddoi = "10.1214/aoms/1177699070", +fjournal = "The Annals of Mathematical Statistics", +journal = "Ann. Math. Statist.", +month = "02", +number = "1", +pages = "191--200", +publisher = "The Institute of Mathematical Statistics", +title = "Stochastic Approximation of Minima with Improved Asymptotic Speed", +unusedurl = "https://doi.org/10.1214/aoms/1177699070", +volume = "38", +year = "1967" +} + + + +@article{chen1988, + author = "Chen, Hung", + unusedunuseddoi = "10.1214/aos/1176350965", + journal = "The Annals of Statistics", + month = "Sep", + number = "3", + pages = "1330--1334", + publisher = "The Institute of Mathematical Statistics", + title = "Lower Rate of Convergence for Locating a Maximum of a Function", + unusedurl = "http://dx.doi.org/10.1214/aos/1176350965", + volume = "16", + year = "1988" +} + + +@misc{micropredictions2, + author = {MicroPredictions}, + author = {Petter Cotton}, + title = {MicroPredictions ELO ratings}, + year = "2020", + howpublished = {\url{https://microprediction.github.io/optimizer-elo-ratings/}}, + unusednote = "[Online; accessed 27-April-2021]", +} + +@misc{micropredictions1, + author = {Petter Cotton}, + title = {An introduction to Z-streams (and collective micropredictions)}, + year = "2020", + howpublished = {\url{https://www.linkedin.com/pulse/short-introduction-z-streams-peter-cotton-phd/}}, + unusednote = "[Online; accessed 27-March-2021]" + } + +@book{rechenberg73, + title = {Evolutionstrategie: Optimierung Technischer Systeme nach Prinzipien des Biologischen Evolution}, + author = {Ingo Rechenberg}, + year = 1973, + publisher = {Fromman-Holzboog Verlag}, + unusedaddress = {Stuttgart} +} + + +@inproceedings{lengler, +author = {Doerr, Benjamin and Doerr, Carola and Lengler, Johannes}, +title = {Self-Adjusting Mutation Rates with Provably Optimal Success Rules}, +year = {2019}, +unusedisbn = {9781450361118}, +publisher = {Association for Computing Machinery}, +unusedaddress = {New York, NY, USA}, +unusedurl = 
{https://doi.org/10.1145/3321707.3321733}, +unusedunuseddoi = {10.1145/3321707.3321733}, +abstract = {The one-fifth success rule is one of the best-known and most widely accepted techniques to control the parameters of evolutionary algorithms. While it is often applied in the literal sense, a common interpretation sees the one-fifth success rule as a family of success-based updated rules that are determined by an update strength F and a success rate s. We analyze in this work how the performance of the (1+1) Evolutionary Algorithm (EA) on LeadingOnes depends on these two hyper-parameters. Our main result shows that the best performance is obtained for small update strengths F = 1+o(1) and success rate 1/e. We also prove that the runtime obtained by this parameter setting is asymptotically optimal among all dynamic choices of the mutation rate for the (1+1) EA, up to lower order error terms. We show similar results for the resampling variant of the (1+1) EA, which enforces to flip at least one bit per iteration.}, +booktitle = {Proceedings of the Genetic and Evolutionary Computation Conference}, +pages = {1479–1487}, +numpages = {9}, +unusedlocation = {Prague, Czech Republic}, +series = {GECCO '19} +} + +@inproceedings{lamcts, + author = {Linnan Wang and + Rodrigo Fonseca and + Yuandong Tian}, + title = {Learning Search Space Partition for Black-box Optimization using Monte + Carlo Tree Search}, + booktitle = {Advances in Neural Information Processing Systems 33: Annual Conference + on Neural Information Processing Systems 2020, NeurIPS 2020, December + 6-12, 2020, virtual}, + year = {2020}, + url = {https://proceedings.neurips.cc/paper/2020/hash/e2ce14e81dba66dbff9cbc35ecfdb704-Abstract.html}, + timestamp = {Tue, 19 Jan 2021 15:57:19 +0100}, +, + bibsource = {dblp computer science bibliography, https://dblp.org} +} + +@misc{leakage, + doi = {10.48550/ARXIV.2207.07048}, + + url = {https://arxiv.org/abs/2207.07048}, + + author = {Kapoor, Sayash and Narayanan, Arvind}, + + keywords = {Machine Learning (cs.LG), Artificial Intelligence (cs.AI), Methodology (stat.ME), FOS: Computer and information sciences, FOS: Computer and information sciences}, + + title = {Leakage and the Reproducibility Crisis in ML-based Science}, + + publisher = {arXiv}, + + year = {2022}, + + copyright = {arXiv.org perpetual, non-exclusive license} +} + + +@misc{rlgoogle, + title={The False Dawn: Reevaluating Google's Reinforcement Learning for Chip Macro Placement}, + author={Igor L. Markov}, + year={2023}, + eprint={2306.09633}, + archivePrefix={arXiv}, + primaryClass={cs.LG} +} + + + +@misc{ecnassurvey, + title={A Survey on Evolutionary Neural Architecture Search}, + author={Yuqiao Liu and Yanan Sun and Bing Xue and Mengjie Zhang and Gary G. Yen and Kay Chen Tan}, + year={2021}, + eprint={2008.10937}, + archivePrefix={arXiv}, + primaryClass={cs.NE} +} +@misc{repronas, + title={Random Search and Reproducibility for Neural Architecture Search}, + author={Liam Li and Ameet Talwalkar}, + year={2019}, + eprint={1902.07638}, + archivePrefix={arXiv}, + primaryClass={cs.LG} +} +@misc{pham2018efficient, + title={Efficient Neural Architecture Search via Parameter Sharing}, + author={Hieu Pham and Melody Y. Guan and Barret Zoph and Quoc V. 
Le and Jeff Dean}, + year={2018}, + eprint={1802.03268}, + archivePrefix={arXiv}, + primaryClass={cs.LG} +} +@misc{real2019regularized, + title={Regularized Evolution for Image Classifier Architecture Search}, + author={Esteban Real and Alok Aggarwal and Yanping Huang and Quoc V Le}, + year={2019}, + eprint={1802.01548}, + archivePrefix={arXiv}, + primaryClass={cs.NE} +} + + +@InProceedings{mlis, +author="Beyer, Hans-Georg", +title="Mutate large, but inherit small! On the analysis of rescaled mutations in ( $( 1,\lambda)$-ES with noisy fitness data", +booktitle="Parallel Problem Solving from Nature --- PPSN V", +year="1998", +publisher="Springer", +unusedunusedaddress="Berlin, Heidelberg", +pages="109--118", +abstract="The paper presents the asymptotical analysis of a technique for improving the convergence of evolution strategies (ES) on noisy fitness data. This technique that may be called ``Mutate large, but inherit small'', is discussed in light of the EPP (evolutionary progress principle). The derivation of the progress rate formula is sketched, its predictions are compared with experiments, and its limitations are shown. The dynamical behavior of the ES is investigated. It will be shown that standard self-adaptation has considerable problems to drive the ES in its optimum working regime. Remedies are provided to improve the self-adaptation.", +unusedisbn="978-3-540-49672-4" +} + +@inproceedings{vasilfoga, + author = {Vasil Khalidov and + Maxime Oquab and + J{\'{e}}r{\'{e}}my Rapin and + Olivier Teytaud}, + title = {Consistent population control: generate plenty of points, but with + a bit of resampling}, + booktitle = {Proceedings of the 15th {ACM/SIGEVO} Conference on Foundations of + Genetic Algorithms, {FOGA} 2019, Potsdam, Germany, August 27-29, 2019}, + pages = {116--123}, + year = {2019}, + crossref = {DBLP:conf/foga/2019}, + url = {https://doi.org/10.1145/3299904.3340312}, + unuseddoi = {10.1145/3299904.3340312}, +} + +@INPROCEEDINGS{pde, + author={Abbass, H.A. and Sarker, R. and Newton, C.}, + booktitle={Proceedings of the 2001 Congress on Evolutionary Computation (IEEE Cat. No.01TH8546)}, + title={PDE: a Pareto-frontier differential evolution approach for multi-objective optimization problems}, + year={2001}, + volume={2}, + number={}, + pages={971-978 vol. 2}, + doi={10.1109/CEC.2001.934295}} + + +@inproceedings{mode, + title={DEMO: Differential Evolution for Multiobjective Optimization}, + author={Tea Robic and Bogdan Filipi}, + booktitle={International Conference on Evolutionary Multi-Criterion Optimization}, + year={2005} + } diff --git a/scripts/tex/conclusion.tex b/scripts/tex/conclusion.tex new file mode 100644 index 0000000000..9af3a9de44 --- /dev/null +++ b/scripts/tex/conclusion.tex @@ -0,0 +1,94 @@ +Unless specified otherwise, MetaModel means CMA equipped with a quadratic MetaModel, which is automatically enabled when the learning performs well. +Neural, RF and SVM as prefixes for MetaModel mean that we use Neural nets, Random Forest or Support Vector Machines as +surrogate models. The quadratic and random forest meta-models look best overall. DE or OnePlusOne as a suffix mean that +we use differential evolution or the $(1+1)$ evolution strategy as an underlying optimization model, instead of CMA. +ChainMetaModelSQP is a memetic algorithm: it combines MetaModel (hence, CMA plus a MetaModel) and a final run using a local method, namely SQP (sequential quadratic programming). 
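+
+All the variants named above are registered Nevergrad optimizers and can be instantiated by name from the registry; the following is an illustrative sketch (the quadratic objective is a placeholder, not one of the benchmark functions):
+\begin{verbatim}
+import nevergrad as ng
+
+def loss(x):
+    # Placeholder objective: shifted sphere.
+    return sum((xi - 0.3) ** 2 for xi in x)
+
+# e.g. "MetaModel", "RFMetaModel", "MetaModelOnePlusOne", "ChainMetaModelSQP", ...
+opt_cls = ng.optimizers.registry["ChainMetaModelSQP"]
+optimizer = opt_cls(parametrization=10, budget=500)
+recommendation = optimizer.minimize(loss)
+print(recommendation.value)
+\end{verbatim}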
+Usually, a meta-model and a fast local search at the end do improve evolutionary methods in continuous domains.
+
+On benchmarks close to the good old BBOB, CMA and its variants do perform well. The variants equipped with a MetaModel perform better, and variants equipped with the MetaModel and the final local search (namely ChainMetaModelSQP) are even better on YABBOB. It remains the best with a big budget (YABIGBBOB). In the parallel case, many tools are nearly equivalent, and in the small budget case (YASMALLBBOB, YATINYBBOB, YATUNINGBBOB) Cobyla performs well, as is frequent in the state of the art.%\cite{TODO}.
+
+On many real-world benchmarks, the budget is lower than in the traditional context of BBOB with budget $=$ dimension $\times$ 1000. There are cases with a budget/dimension ratio $<1$, or of the order of a few units or a few dozen. DE performs well in many cases. This is consistent with many publications considering real-world problems. %TODO
+However, for the lowest budget/dimension ratios, the simple $(1+1)$ evolution strategy is excellent in continuous domains.
+We note the interesting performance of QODE (quasi-opposite DE) in many cases.
+
+SQP is excellent in noisy optimization, but methods with an ad hoc scale can compete: the scale is critical in noisy optimization, unless the budget is large. In some cases, TBPSA (combining population control\cite{mlis} and other tools as in \cite{vasilfoga}) also performs well, as well as combinations between bandits and evolutionary algorithms (NoisyDiscreteOnePlusOne).
+Methods designed in the noisy case might diverge.
+
+We include benchmarks with one, several or many constraints (prefixes onepen, pen and megapen), tackled with dynamic penalization: results were not fundamentally different from the non-penalized case. However, MetaModels are effective in a very stable and visible manner: this is consistent with the state of the art.% \cite{TODO}.
+Cobyla remains very strong in low budget cases. Bayesian Optimization methods are more expensive than other algorithms, but rarely perform well, though they are comparable to other methods in low budget cases. The different Bayesian Optimization methods perform differently from each other, which is consistent with their highly parametric nature.
+
+%MetaModel perform well on BBOB-style optimization, but were also excellent for several low budget things.
+
+In discrete optimization, INSTRUM\_DISCRETE and SEQUENTIAL\_INSTRUM\_DISCRETE are benchmarks with arity $> 2$ and in which the order cannot be exploited. Methods which ignore the order do perform better.
+We also consider the PBO and BONNANS test functions. Regarding discrete contexts, we note the great performance of the so-called ``DiscreteLenglerOnePlusOne''
+\cite{lengler}. We tested variants with a different constant (methods with ``Lengler'' and one of ``Fourth, Half, 2, 3'' in the name) and it turns out that the proved constant, in spite of the simple context in which it was derived, is good.
+Adding ``Tabu'' (SA, as self-avoiding, in the name) can make methods better.
+Still in the discrete case, we note the good performance of methods with ``Recombining'' in the name: while they have been less investigated theoretically than variants of the discrete $(1+1)$ method, methods with crossover might be quite competitive.
+
+Note that Cobyla is frequently at the top, as well as ChainMetaModelSQP, OnePlusOne, DE, RFMetaModel, or DiscreteLenglerOnePlusOne.
+It looks likely that wizards could be improved by using more of these, in particular the random forest metamodel, which is rarely used.
+Overall, wizards (such as NGOpt or NGOptRW) do perform well, though they are not perfect. Independent tests on other benchmarks will be interesting, as the wizards tested here have been available in the platform for quite a long time. They are useful, but can sometimes guess wrong, so human expertise is still quite useful.
+
+Regarding the principles of benchmarking, we note that the two different views in Nevergrad (the heatmap and the average normalized loss) sometimes present significantly different pictures. This emphasizes that how we look at the data has a big impact on the interpretation.
+
+%Regarding BBOB variants, TODO
+%
+%Regarding PBBOB, TODO
+%
+%Regarding Holland and Voronoi crossovers, TODO
+%
+%Algorithms taking into account the difference between ordered and unordered discrete variables TODO
+%
+%In the continuous low budget case, Cobyla and BOBYQA are frequently excellent in moderate dimension. The high-dimensional case sees great successes of DE variants.
+
+Consistently with some real-world experiments in \cite{micropredictions1,micropredictions2}, we note that the $(1+1)$ evolution strategy with the one-fifth rule from \cite{rechenberg73} is still quite good. In artificial benchmarks with a lot of evaluations, high conditioning and rotated contexts, it can become weak; for many realistic contexts, in particular a realistic budget/dimension ratio, it is quite good.
+The simple $(1+1)$ evolution strategy with the one-fifth rule, even more so when equipped with a meta-model, turned out to be powerful.
+
+In real-world benchmark tunings, results varied vastly from one benchmark to the next. It looks like we can conclude anything we want by selecting a benchmark or by tuning algorithms for a specific case. Overall, DE variants, BOBYQA, HyperOpt and RandomSearch arguably perform well.
+
+The multi-objective setting is quite difficult to analyze. The DE adapted to the multi-objective case available in Nevergrad\cite{pde,mode} performs well in classical settings, but high-dimensional and many-objective cases lead to surprisingly good results for, e.g., discrete methods running in the continuous case (which means that they handle variables with infinitely many possible values). This deserves additional investigation, as using discrete algorithms such as DiscreteOnePlusOne in difficult continuous problems is rather new and ignores the topology of each variable, as if the real numbers were not ordered: this means randomly redrawing some variables.
+
+BFGS can be used in a black-box setting, using finite differences. This is, however, rarely a good option.
+
+Some benchmarks, such as photonics or topology optimization, have variables organized in arrays of dimension 1 or 2. Not all algorithms are able to use this.
+
+\subsection{Caveats, further work}
+Some benchmarks were implemented but not included in the release due to legal issues in the license.
+We did not include the important case in which a multi-objective run is performed on surrogate models only: (1) randomly sample, (2) approximate the objective functions by surrogate models, (3) perform a multi-objective optimization on the surrogates only. This is useful for including the user in the loop. This is not tested in the current benchmarks.
+
+Compared to the old Dashboard from 2021, results are somewhat similar, with more details.
However, we have more real-world benchmarks and more discrete experiments. Also,
+some methods have been removed, in particular some slow methods which rarely performed well compared to the present methods.
+
+Key points in benchmarks are frequently how the initialization matches the distribution of the optima, and various benchmarks are too close to a single algorithm: this can be the case in the present work as well, in particular for wizards. Therefore, independent runs with your preferred modifications of the settings will be fruitful for us: we did our best to make this user-friendly.
diff --git a/scripts/tex/end.tex b/scripts/tex/end.tex
new file mode 100644
index 0000000000..a6c37b13ad
--- /dev/null
+++ b/scripts/tex/end.tex
@@ -0,0 +1,9 @@
+
+\section*{Acknowledgements}
+We are very grateful to the Dagstuhl seminar 23251 (June 2023), and more specifically to
+% Ofer Shir, Thomas B\"ack, Vanessa Volz, Mariapia Marchi, Hao Wang.
+
+% TODO done in discussions with Diederick, Carola
+\bibliographystyle{abbrv}
+\bibliography{biblio.bib}
+\end{document}
diff --git a/scripts/txt/aquacrop_fao.txt b/scripts/txt/aquacrop_fao.txt
new file mode 100644
index 0000000000..80b8091741
--- /dev/null
+++ b/scripts/txt/aquacrop_fao.txt
@@ -0,0 +1,26 @@
+Optimization of crop management.
+
+
+
+budget100
+budget1600
+budget200
+budget25
+budget400
+budget50
+budget800
+dimension3
+dimension4
+dimension5
+dimension6
+seasonal151.5011255099332
+seasonal201.9534568704581
+seasonal201.95345687045815
+seasonal342.54932781948395
+seasonal444.02554299755485
+smts3
+smts4
+smts5
+smts6
+workers1
+workers30
diff --git a/scripts/txt/bonnans.txt b/scripts/txt/bonnans.txt
new file mode 100644
index 0000000000..af9ba34936
--- /dev/null
+++ b/scripts/txt/bonnans.txt
@@ -0,0 +1,34 @@
+Function proposed by Frederic Bonnans.
+Discrete optimization. New in the field of black-box optimization.
+
+
+budget100
+budget20
+budget30
+budget40
+budget50
+budget60
+budget70
+budget80
+budget90
+index0
+index1
+index10
+index11
+index12
+index13
+index14
+index15
+index16
+index17
+index18
+index19
+index2
+index20
+index3
+index4
+index5
+index6
+index7
+index8
+index9
diff --git a/scripts/txt/control_problem.txt b/scripts/txt/control_problem.txt
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/scripts/txt/deceptive.txt b/scripts/txt/deceptive.txt
new file mode 100644
index 0000000000..c0a65ffea7
--- /dev/null
+++ b/scripts/txt/deceptive.txt
@@ -0,0 +1,32 @@
+Difficult functions with multimodalities, high conditioning, etc.
+
+
+aggregatormax
+aggregatorsum
+blocks1
+blocks16
+blocks2
+blocks8
+budget100
+budget1600
+budget200
+budget25
+budget37
+budget400
+budget50
+budget75
+budget800
+budget87
+dimension16
+dimension2
+dimension32
+dimension4
+dimensions16
+dimensions2
+dimensions32
+dimensions4
+namedeceptiveillcond
+namedeceptivemultimodal
+namedeceptivepath
+rotationFalse
+rotationTrue
diff --git a/scripts/txt/double_o_seven.txt b/scripts/txt/double_o_seven.txt
new file mode 100644
index 0000000000..0fca1b7cd0
--- /dev/null
+++ b/scripts/txt/double_o_seven.txt
@@ -0,0 +1,25 @@
+Game strategy optimization. Noisy.
+ + +archiDenseNet +archiPerceptron +budget100 +budget1000 +budget10000 +budget200 +budget2000 +budget20000 +budget400 +budget4000 +budget40000 +budget50 +budget500 +budget5000 +dimension15 +dimension675 +repetitions1 +repetitions10 +repetitions100 +workers1 +workers10 +workers100 diff --git a/scripts/txt/fishing.txt b/scripts/txt/fishing.txt new file mode 100644 index 0000000000..4a3b42dc43 --- /dev/null +++ b/scripts/txt/fishing.txt @@ -0,0 +1,21 @@ +Optimization of a strategy in front of Lotka-Volterra equations. + +budget100 +budget1600 +budget200 +budget25 +budget400 +budget50 +budget800 +dimension105 +dimension17 +dimension35 +dimension52 +dimension70 +dimension88 +time105 +time17 +time35 +time52 +time70 +time88 diff --git a/scripts/txt/keras_tuning.txt b/scripts/txt/keras_tuning.txt new file mode 100644 index 0000000000..a57f7264e2 --- /dev/null +++ b/scripts/txt/keras_tuning.txt @@ -0,0 +1,11 @@ +Tuning of machine learning models based on the Keras library. + +budget150 +budget500 +datasetauto-mpg +datasetdiabetes +datasetkerasBoston +datasetred-wine +datasetwhite-wine +workers150 +workers500 diff --git a/scripts/txt/mldakmeans.txt b/scripts/txt/mldakmeans.txt new file mode 100644 index 0000000000..cd5a622ad4 --- /dev/null +++ b/scripts/txt/mldakmeans.txt @@ -0,0 +1,20 @@ +Kmeans, part of the MLDA benchmark. + + +budget1000 +budget10000 +clusters10 +clusters100 +clusters5 +clusters50 +dimension10 +dimension100 +dimension30 +dimension300 +nameGerman towns +nameRuspini +rescaleFalse +rescaleTrue +workers1 +workers10 +workers100 diff --git a/scripts/txt/mltuning.txt b/scripts/txt/mltuning.txt new file mode 100644 index 0000000000..d795cedcb6 --- /dev/null +++ b/scripts/txt/mltuning.txt @@ -0,0 +1,23 @@ +Tuning of scikit learn models. Hyperparameters. + +budget150 +budget500 +datasetartificial +datasetartificialcos +datasetartificialsquare +datasetauto-mpg +datasetboston +datasetdiabetes +datasetred-wine +datasetwhite-wine +depth +dimension1 +dimension11 +dimension2 +dimension3 +dimension5 +dimensionnan +regressormlp +tree +workers150 +workers500 diff --git a/scripts/txt/mono_rocket.txt b/scripts/txt/mono_rocket.txt new file mode 100644 index 0000000000..9aaec744ae --- /dev/null +++ b/scripts/txt/mono_rocket.txt @@ -0,0 +1,26 @@ +Rocket parametrization, optimized sequentially. + +budget100 +budget1600 +budget200 +budget25 +budget400 +budget50 +budget800 +symmetry0 +symmetry1 +symmetry10 +symmetry11 +symmetry12 +symmetry13 +symmetry14 +symmetry15 +symmetry16 +symmetry2 +symmetry3 +symmetry4 +symmetry5 +symmetry6 +symmetry7 +symmetry8 +symmetry9 diff --git a/scripts/txt/multimodal.txt b/scripts/txt/multimodal.txt new file mode 100644 index 0000000000..173a6e84f4 --- /dev/null +++ b/scripts/txt/multimodal.txt @@ -0,0 +1,22 @@ +Multimodal artificial problems. + +budget10000 +budget100000 +budget3000 +budget30000 +dimension150 +dimension18 +dimension25 +dimension3 +dimensions25 +dimensions3 +nameackley +namedeceptivemultimodal +namegriewank +namehm +namelunacek +namerastrigin +namerosenbrock +variables0 +variables125 +variables15 diff --git a/scripts/txt/multiobjective_example.txt b/scripts/txt/multiobjective_example.txt new file mode 100644 index 0000000000..5d2df22bb6 --- /dev/null +++ b/scripts/txt/multiobjective_example.txt @@ -0,0 +1,14 @@ +Multiobjective artificial benchmark. 
+ +budget100 +budget1600 +budget200 +budget3200 +budget400 +budget800 +dimension6 +dimension7 +objectives2 +objectives3 +workers1 +workers100 diff --git a/scripts/txt/multiobjective_example_hd.txt b/scripts/txt/multiobjective_example_hd.txt new file mode 100644 index 0000000000..71e48601d0 --- /dev/null +++ b/scripts/txt/multiobjective_example_hd.txt @@ -0,0 +1,14 @@ +High-dimensional multiobjective problem. + +budget100 +budget1600 +budget200 +budget3200 +budget400 +budget800 +dimension1999 +dimension2000 +objectives2 +objectives3 +workers1 +workers100 diff --git a/scripts/txt/multiobjective_example_many_hd.txt b/scripts/txt/multiobjective_example_many_hd.txt new file mode 100644 index 0000000000..2099c9ade9 --- /dev/null +++ b/scripts/txt/multiobjective_example_many_hd.txt @@ -0,0 +1,14 @@ +Many-Objective High-dimensional optimization. + +budget100 +budget1600 +budget200 +budget3200 +budget400 +budget800 +dimension1999 +dimension2000 +objectives4 +objectives6 +workers1 +workers100 diff --git a/scripts/txt/naive_seq_keras_tuning.txt b/scripts/txt/naive_seq_keras_tuning.txt new file mode 100644 index 0000000000..592e7edcc3 --- /dev/null +++ b/scripts/txt/naive_seq_keras_tuning.txt @@ -0,0 +1,12 @@ +Keras tuning, but not very parallel and without overfitting. + +budget150 +budget500 +datasetauto-mpg +datasetdiabetes +datasetkerasBoston +datasetred-wine +datasetwhite-wine +workers1 +workers125 +workers37 diff --git a/scripts/txt/naive_seq_mltuning.txt b/scripts/txt/naive_seq_mltuning.txt new file mode 100644 index 0000000000..d9c39afc53 --- /dev/null +++ b/scripts/txt/naive_seq_mltuning.txt @@ -0,0 +1,25 @@ +Hyperparameter tuning but without overfitting the scikit learn models, hence the naming "naive". +Corresponds to a case with sufficiently large datasets for not caring about overfitting. + +budget150 +budget500 +datasetartificial +datasetartificialcos +datasetartificialsquare +datasetauto-mpg +datasetboston +datasetdiabetes +datasetred-wine +datasetwhite-wine +depth +dimension1 +dimension11 +dimension2 +dimension3 +dimension5 +dimensionnan +regressormlp +tree +workers1 +workers125 +workers37 diff --git a/scripts/txt/naive_veryseq_keras_tuning.txt b/scripts/txt/naive_veryseq_keras_tuning.txt new file mode 100644 index 0000000000..3b04ded6a8 --- /dev/null +++ b/scripts/txt/naive_veryseq_keras_tuning.txt @@ -0,0 +1,9 @@ +Completely sequential hyperparameter optimization of Keras model without overfitting. + +budget150 +budget500 +datasetauto-mpg +datasetdiabetes +datasetkerasBoston +datasetred-wine +datasetwhite-wine diff --git a/scripts/txt/naivemltuning.txt b/scripts/txt/naivemltuning.txt new file mode 100644 index 0000000000..fc361a6050 --- /dev/null +++ b/scripts/txt/naivemltuning.txt @@ -0,0 +1,23 @@ +Tuning of scikit learn models, without overfitting. + +budget150 +budget500 +datasetartificial +datasetartificialcos +datasetartificialsquare +datasetauto-mpg +datasetboston +datasetdiabetes +datasetred-wine +datasetwhite-wine +depth +dimension1 +dimension11 +dimension2 +dimension3 +dimension5 +dimensionnan +regressormlp +tree +workers150 +workers500 diff --git a/scripts/txt/nano_naive_seq_mltuning.txt b/scripts/txt/nano_naive_seq_mltuning.txt new file mode 100644 index 0000000000..8b59ea2e4a --- /dev/null +++ b/scripts/txt/nano_naive_seq_mltuning.txt @@ -0,0 +1,24 @@ +Small budget, not very parallel, hyperparameter of scikit learn models without overfitting. 
+ +budget160 +budget80 +datasetartificial +datasetartificialcos +datasetartificialsquare +datasetauto-mpg +datasetboston +datasetdiabetes +datasetred-wine +datasetwhite-wine +depth +dimension1 +dimension11 +dimension2 +dimension3 +dimension5 +dimensionnan +regressormlp +tree +workers1 +workers20 +workers40 diff --git a/scripts/txt/nano_naive_veryseq_mltuning.txt b/scripts/txt/nano_naive_veryseq_mltuning.txt new file mode 100644 index 0000000000..f1fb4f54c6 --- /dev/null +++ b/scripts/txt/nano_naive_veryseq_mltuning.txt @@ -0,0 +1,21 @@ +Completely sequential hyperparameter optimization of scikit learn models without overfitting (naive). Low budget (nano). + +budget160 +budget80 +datasetartificial +datasetartificialcos +datasetartificialsquare +datasetauto-mpg +datasetboston +datasetdiabetes +datasetred-wine +datasetwhite-wine +depth +dimension1 +dimension11 +dimension2 +dimension3 +dimension5 +dimensionnan +regressormlp +tree diff --git a/scripts/txt/nano_seq_mltuning.txt b/scripts/txt/nano_seq_mltuning.txt new file mode 100644 index 0000000000..cc7357111b --- /dev/null +++ b/scripts/txt/nano_seq_mltuning.txt @@ -0,0 +1,24 @@ +Low budget, not very parallel, hyperparameter optimization for scikit learn models. + +budget160 +budget80 +datasetartificial +datasetartificialcos +datasetartificialsquare +datasetauto-mpg +datasetboston +datasetdiabetes +datasetred-wine +datasetwhite-wine +depth +dimension1 +dimension11 +dimension2 +dimension3 +dimension5 +dimensionnan +regressormlp +tree +workers1 +workers20 +workers40 diff --git a/scripts/txt/nano_veryseq_mltuning.txt b/scripts/txt/nano_veryseq_mltuning.txt new file mode 100644 index 0000000000..526c300c78 --- /dev/null +++ b/scripts/txt/nano_veryseq_mltuning.txt @@ -0,0 +1,21 @@ +Completely sequential (veryseq) hyperparameter of scikit learn models. Low budget (hence the name nano). + +budget160 +budget80 +datasetartificial +datasetartificialcos +datasetartificialsquare +datasetauto-mpg +datasetboston +datasetdiabetes +datasetred-wine +datasetwhite-wine +depth +dimension1 +dimension11 +dimension2 +dimension3 +dimension5 +dimensionnan +regressormlp +tree diff --git a/scripts/txt/neuro_control_problem.txt b/scripts/txt/neuro_control_problem.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/scripts/txt/oneshot_mltuning.txt b/scripts/txt/oneshot_mltuning.txt new file mode 100644 index 0000000000..4a246beff5 --- /dev/null +++ b/scripts/txt/oneshot_mltuning.txt @@ -0,0 +1,23 @@ +Tuning in a single pass (one-shot) of scikit learn models. + +budget150 +budget500 +datasetartificial +datasetartificialcos +datasetartificialsquare +datasetauto-mpg +datasetboston +datasetdiabetes +datasetred-wine +datasetwhite-wine +depth +dimension1 +dimension11 +dimension2 +dimension3 +dimension5 +dimensionnan +regressormlp +tree +workers150 +workers500 diff --git a/scripts/txt/pbbob.txt b/scripts/txt/pbbob.txt new file mode 100644 index 0000000000..ee6a17a01c --- /dev/null +++ b/scripts/txt/pbbob.txt @@ -0,0 +1,27 @@ +PBBOB, i.e. parametric bbob. + +budget100 +budget200 +budget300 +budget400 +budget500 +budget600 +budget700 +budget800 +dimension20 +dimension40 +dimensions20 +dimensions40 +expo1 +expo3 +expo5 +expo7 +expo9 +namecigar +namedeceptivemultimodal +namehm +namerastrigin +namesphere +workers1 +workers10 +workers50 diff --git a/scripts/txt/pbo_reduced_suite.txt b/scripts/txt/pbo_reduced_suite.txt new file mode 100644 index 0000000000..d45d83bd7b --- /dev/null +++ b/scripts/txt/pbo_reduced_suite.txt @@ -0,0 +1,29 @@ +PBO, a subset. 
This comes from IOH. The functions are defined on various domains and the variables have two possible +values. + +budget100 +budget1000 +budget10000 +dim100 +dim16 +dim64 +fid10 +fid13 +fid16 +fid17 +fid19 +fid20 +fid23 +fid3 +fid4 +fid6 +fid7 +iid1 +iid2 +iid3 +iid4 +instrumentationOrdered +instrumentationSoftmax +instrumentationUnordered +workers1 +workers10 diff --git a/scripts/txt/reduced_yahdlbbbob.txt b/scripts/txt/reduced_yahdlbbbob.txt new file mode 100644 index 0000000000..bda933941b --- /dev/null +++ b/scripts/txt/reduced_yahdlbbbob.txt @@ -0,0 +1,21 @@ +High-dimensional variant of YABBOB, reduced set of problems. + +budget10 +budget20 +budget40 +dimension100 +dimension1000 +dimension3000 +dimensions100 +dimensions1000 +dimensions3000 +namedeceptiveillcond +namediscus +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namestepdoublelinearslope +rotationFalse +rotationTrue diff --git a/scripts/txt/rocket.txt b/scripts/txt/rocket.txt new file mode 100644 index 0000000000..0d7b49700c --- /dev/null +++ b/scripts/txt/rocket.txt @@ -0,0 +1,26 @@ +Optimization of the design of a rocket. + +budget100 +budget1600 +budget200 +budget25 +budget400 +budget50 +budget800 +symmetry0 +symmetry1 +symmetry10 +symmetry11 +symmetry12 +symmetry13 +symmetry14 +symmetry15 +symmetry16 +symmetry2 +symmetry3 +symmetry4 +symmetry5 +symmetry6 +symmetry7 +symmetry8 +symmetry9 diff --git a/scripts/txt/seq_keras_tuning.txt b/scripts/txt/seq_keras_tuning.txt new file mode 100644 index 0000000000..03ee6f2465 --- /dev/null +++ b/scripts/txt/seq_keras_tuning.txt @@ -0,0 +1,12 @@ +Not very parallel hyperparameter optimization of the hyperparameters of Keras models. + +budget150 +budget500 +datasetauto-mpg +datasetdiabetes +datasetkerasBoston +datasetred-wine +datasetwhite-wine +workers1 +workers125 +workers37 diff --git a/scripts/txt/seq_mltuning.txt b/scripts/txt/seq_mltuning.txt new file mode 100644 index 0000000000..146048cf07 --- /dev/null +++ b/scripts/txt/seq_mltuning.txt @@ -0,0 +1,24 @@ +Not very parallel (seq) optimization of scikit learn models. + +budget150 +budget500 +datasetartificial +datasetartificialcos +datasetartificialsquare +datasetauto-mpg +datasetboston +datasetdiabetes +datasetred-wine +datasetwhite-wine +depth +dimension1 +dimension11 +dimension2 +dimension3 +dimension5 +dimensionnan +regressormlp +tree +workers1 +workers125 +workers37 diff --git a/scripts/txt/sequential_topology_optimization.txt b/scripts/txt/sequential_topology_optimization.txt new file mode 100644 index 0000000000..ee18dfad44 --- /dev/null +++ b/scripts/txt/sequential_topology_optimization.txt @@ -0,0 +1,29 @@ +Topology optimization, sequential. + +budget10 +budget10240 +budget1280 +budget160 +budget20 +budget20480 +budget2560 +budget320 +budget40 +budget40960 +budget5120 +budget640 +budget80 +dimension100 +dimension1600 +dimension400 +dimension900 +n10 +n20 +n30 +n40 +parametrizationarray10x10 +parametrizationarray20x20 +parametrizationarray30x30 +parametrizationarray40x40 +workers1 +workers30 diff --git a/scripts/txt/spsa_benchmark.txt b/scripts/txt/spsa_benchmark.txt new file mode 100644 index 0000000000..0dff46bd8d --- /dev/null +++ b/scripts/txt/spsa_benchmark.txt @@ -0,0 +1,16 @@ +Noisy optimization benchmark originally used for testing SPSA. 
+ +budget1000 +budget128000 +budget16000 +budget2000 +budget32000 +budget4000 +budget500 +budget64000 +budget8000 +namecigar +namesphere +namesphere4 +rotationFalse +rotationTrue diff --git a/scripts/txt/topology_optimization.txt b/scripts/txt/topology_optimization.txt new file mode 100644 index 0000000000..3cc515e8e1 --- /dev/null +++ b/scripts/txt/topology_optimization.txt @@ -0,0 +1,29 @@ +Topology optimization. + +budget10 +budget10240 +budget1280 +budget160 +budget20 +budget20480 +budget2560 +budget320 +budget40 +budget40960 +budget5120 +budget640 +budget80 +dimension100 +dimension1600 +dimension400 +dimension900 +n10 +n20 +n30 +n40 +parametrizationarray10x10 +parametrizationarray20x20 +parametrizationarray30x30 +parametrizationarray40x40 +workers1 +workers30 diff --git a/scripts/txt/ultrasmall_photonics.txt b/scripts/txt/ultrasmall_photonics.txt new file mode 100644 index 0000000000..3ec2bef6d6 --- /dev/null +++ b/scripts/txt/ultrasmall_photonics.txt @@ -0,0 +1,7 @@ +Ultrasmall optimization of photonics. Shape optimization, nanometric scale. + +budget10 +budget100 +budget1000 +methodclipping +methodtanh diff --git a/scripts/txt/ultrasmall_photonics2.txt b/scripts/txt/ultrasmall_photonics2.txt new file mode 100644 index 0000000000..f1c7c19274 --- /dev/null +++ b/scripts/txt/ultrasmall_photonics2.txt @@ -0,0 +1,7 @@ +Very small benchmark for photonics, 2nd category of parametrizations. + +budget10 +budget100 +budget1000 +methodclipping +methodtanh diff --git a/scripts/txt/veryseq_keras_tuning.txt b/scripts/txt/veryseq_keras_tuning.txt new file mode 100644 index 0000000000..5650169702 --- /dev/null +++ b/scripts/txt/veryseq_keras_tuning.txt @@ -0,0 +1,9 @@ +Completely sequential optimization of the hyperparameters of Keras models. + +budget150 +budget500 +datasetauto-mpg +datasetdiabetes +datasetkerasBoston +datasetred-wine +datasetwhite-wine diff --git a/scripts/txt/yabbob.txt b/scripts/txt/yabbob.txt new file mode 100644 index 0000000000..4f4cae8db0 --- /dev/null +++ b/scripts/txt/yabbob.txt @@ -0,0 +1,36 @@ +YABBOB: a small-dimensional benchmark with large budget, similar to BBOB/COCO, + +budget12800 +budget200 +budget3200 +budget50 +budget800 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yabigbbob.txt b/scripts/txt/yabigbbob.txt new file mode 100644 index 0000000000..5891e198f3 --- /dev/null +++ b/scripts/txt/yabigbbob.txt @@ -0,0 +1,35 @@ +Counterpart of YABBOB with big budget. 
+ +budget160000 +budget320000 +budget40000 +budget80000 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yaboundedbbob.txt b/scripts/txt/yaboundedbbob.txt new file mode 100644 index 0000000000..6122dd5f86 --- /dev/null +++ b/scripts/txt/yaboundedbbob.txt @@ -0,0 +1,30 @@ +Counterpart of BBOB with a bounded domain. + +budget10 +budget100 +budget20 +budget300 +budget40 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yaboxbbob.txt b/scripts/txt/yaboxbbob.txt new file mode 100644 index 0000000000..8506d1045e --- /dev/null +++ b/scripts/txt/yaboxbbob.txt @@ -0,0 +1,36 @@ +Counterpart of BBOB with a bounded domain (bounds are different from those of YABOXBBOB). + +budget12800 +budget200 +budget3200 +budget50 +budget800 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yahdbbob.txt b/scripts/txt/yahdbbob.txt new file mode 100644 index 0000000000..d21c492242 --- /dev/null +++ b/scripts/txt/yahdbbob.txt @@ -0,0 +1,36 @@ +HD counterpart of YABBOB. + +budget12800 +budget200 +budget3200 +budget50 +budget800 +dimension100 +dimension1000 +dimension3000 +dimensions100 +dimensions1000 +dimensions3000 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yamegapenbbob.txt b/scripts/txt/yamegapenbbob.txt new file mode 100644 index 0000000000..8f5b788659 --- /dev/null +++ b/scripts/txt/yamegapenbbob.txt @@ -0,0 +1,36 @@ +Counterpart of YABBOB with a lot of constraints. 
+ +budget12800 +budget200 +budget3200 +budget50 +budget800 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yamegapenboundedbbob.txt b/scripts/txt/yamegapenboundedbbob.txt new file mode 100644 index 0000000000..91d529acee --- /dev/null +++ b/scripts/txt/yamegapenboundedbbob.txt @@ -0,0 +1,30 @@ +Counterpart of YABOUNDEDBBOB (bounded, therefore) with a lot of constraints. + +budget10 +budget100 +budget20 +budget300 +budget40 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yamegapenboxbbob.txt b/scripts/txt/yamegapenboxbbob.txt new file mode 100644 index 0000000000..88cd2c212a --- /dev/null +++ b/scripts/txt/yamegapenboxbbob.txt @@ -0,0 +1,36 @@ +Counterpart of YABOXBBOB (bounded, therefore) with a lot of constraints. + +budget12800 +budget200 +budget3200 +budget50 +budget800 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yanoisybbob.txt b/scripts/txt/yanoisybbob.txt new file mode 100644 index 0000000000..572b63d379 --- /dev/null +++ b/scripts/txt/yanoisybbob.txt @@ -0,0 +1,36 @@ +Noisy optimization counterpart of YABBOB. +The implementation with ask/tell/recommend is supposed to be correct, which is not the case in all noisy optimization +benchmarks. + +budget12800 +budget3200 +budget51200 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yaonepenbbob.txt b/scripts/txt/yaonepenbbob.txt new file mode 100644 index 0000000000..43e2597a03 --- /dev/null +++ b/scripts/txt/yaonepenbbob.txt @@ -0,0 +1,36 @@ +Counterpart of YABBOB with a single constraint. 
+ +budget12800 +budget200 +budget3200 +budget50 +budget800 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yaonepenboundedbbob.txt b/scripts/txt/yaonepenboundedbbob.txt new file mode 100644 index 0000000000..edc420ee65 --- /dev/null +++ b/scripts/txt/yaonepenboundedbbob.txt @@ -0,0 +1,30 @@ +Counterpart of YABOUNDEDBBOB with a single constraint. + +budget10 +budget100 +budget20 +budget300 +budget40 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yaonepenboxbbob.txt b/scripts/txt/yaonepenboxbbob.txt new file mode 100644 index 0000000000..bc4307f132 --- /dev/null +++ b/scripts/txt/yaonepenboxbbob.txt @@ -0,0 +1,36 @@ +Counterpart of YABOXBBOB with a single constraint. + +budget12800 +budget200 +budget3200 +budget50 +budget800 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yaonepennoisybbob.txt b/scripts/txt/yaonepennoisybbob.txt new file mode 100644 index 0000000000..35b76f5103 --- /dev/null +++ b/scripts/txt/yaonepennoisybbob.txt @@ -0,0 +1,33 @@ +Counterpart of YANOISYBBOB with one constraint. + +budget12800 +budget3200 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yaonepenparabbob.txt b/scripts/txt/yaonepenparabbob.txt new file mode 100644 index 0000000000..a291d6e4c1 --- /dev/null +++ b/scripts/txt/yaonepenparabbob.txt @@ -0,0 +1,36 @@ +Counterpart of YAPARABBOB (parallel optimization for YABBOB) with a single constraint. 
+ +budget12800 +budget200 +budget3200 +budget50 +budget800 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yaonepensmallbbob.txt b/scripts/txt/yaonepensmallbbob.txt new file mode 100644 index 0000000000..9a2c00b4a2 --- /dev/null +++ b/scripts/txt/yaonepensmallbbob.txt @@ -0,0 +1,34 @@ +Single constraint on YASMALLBBOB. + +budget10 +budget20 +budget40 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yaparabbob.txt b/scripts/txt/yaparabbob.txt new file mode 100644 index 0000000000..6f412bf196 --- /dev/null +++ b/scripts/txt/yaparabbob.txt @@ -0,0 +1,36 @@ +Counterpart of YABBOB with parallel optimization. + +budget12800 +budget200 +budget3200 +budget50 +budget800 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yapenbbob.txt b/scripts/txt/yapenbbob.txt new file mode 100644 index 0000000000..87de93a169 --- /dev/null +++ b/scripts/txt/yapenbbob.txt @@ -0,0 +1,36 @@ +Counterpart of YABBOB with some constraints. + +budget12800 +budget200 +budget3200 +budget50 +budget800 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yapenboundedbbob.txt b/scripts/txt/yapenboundedbbob.txt new file mode 100644 index 0000000000..ef0978234a --- /dev/null +++ b/scripts/txt/yapenboundedbbob.txt @@ -0,0 +1,30 @@ +Counterpart of YABOUNDEDBBOB with some constraints. 
+ +budget10 +budget100 +budget20 +budget300 +budget40 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yapenboxbbob.txt b/scripts/txt/yapenboxbbob.txt new file mode 100644 index 0000000000..c6b6de0833 --- /dev/null +++ b/scripts/txt/yapenboxbbob.txt @@ -0,0 +1,36 @@ +Counterpart of YABOXBBOB with some constraints. + +budget12800 +budget200 +budget3200 +budget50 +budget800 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yapennoisybbob.txt b/scripts/txt/yapennoisybbob.txt new file mode 100644 index 0000000000..36c8c520b2 --- /dev/null +++ b/scripts/txt/yapennoisybbob.txt @@ -0,0 +1,33 @@ +Constraints of YANOISYBBOB with some constraints. + +budget12800 +budget3200 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yapenparabbob.txt b/scripts/txt/yapenparabbob.txt new file mode 100644 index 0000000000..131b6c8d6f --- /dev/null +++ b/scripts/txt/yapenparabbob.txt @@ -0,0 +1,36 @@ +Counterpart of YAPARABBOB with some constraints. + +budget12800 +budget200 +budget3200 +budget50 +budget800 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yapensmallbbob.txt b/scripts/txt/yapensmallbbob.txt new file mode 100644 index 0000000000..13825ae457 --- /dev/null +++ b/scripts/txt/yapensmallbbob.txt @@ -0,0 +1,34 @@ +Penalized counterpart of YASMALLBBOB (i.e. low budget). 
+ +budget10 +budget20 +budget40 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yasmallbbob.txt b/scripts/txt/yasmallbbob.txt new file mode 100644 index 0000000000..24f4659510 --- /dev/null +++ b/scripts/txt/yasmallbbob.txt @@ -0,0 +1,34 @@ +Low budget counterpart of YABBOB. + +budget10 +budget20 +budget40 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namebentcigar +namebucherastrigin +namecigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +nameellipsoid +namegriewank +namehm +namelunacek +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue diff --git a/scripts/txt/yatinybbob.txt b/scripts/txt/yatinybbob.txt new file mode 100644 index 0000000000..3d53e67d9a --- /dev/null +++ b/scripts/txt/yatinybbob.txt @@ -0,0 +1,24 @@ +Counterpart of YABBOB with very low budget. + +budget10 +budget20 +budget40 +dimension10 +dimension2 +dimension50 +dimensions10 +dimensions2 +dimensions50 +nameackley +namealtcigar +namealtellipsoid +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +namedoublelinearslope +namegriewank +namehm +namemultipeak +rotationFalse +rotationTrue diff --git a/scripts/txt/yatuningbbob.txt b/scripts/txt/yatuningbbob.txt new file mode 100644 index 0000000000..580c166e7d --- /dev/null +++ b/scripts/txt/yatuningbbob.txt @@ -0,0 +1,29 @@ +Counterpart of YABBOB scaled for slightly looking like hyperparameter tuning problems. Only continuous though. + +budget10 +budget20 +budget40 +dimension10 +dimension15 +dimension2 +dimension5 +dimensions10 +dimensions15 +dimensions2 +dimensions5 +nameackley +namealtcigar +namedeceptiveillcond +namedeceptivemultimodal +namedeceptivepath +namediscus +nameellipsoid +namehm +namemultipeak +namerastrigin +namerosenbrock +namesphere +namestepdoublelinearslope +namestepellipsoid +rotationFalse +rotationTrue