diff --git a/mloop/learners.py b/mloop/learners.py
index bcfcb0f..1265ba6 100644
--- a/mloop/learners.py
+++ b/mloop/learners.py
@@ -9,7 +9,6 @@
 import threading
 import numpy as np
 import random
-import numpy.random as nr
 import scipy.optimize as so
 import logging
 import datetime
@@ -474,7 +473,7 @@ def run(self):
         '''
         self.log.debug('Starting Random Learner')
         if self.first_params is None:
-            next_params = self.min_boundary + nr.rand(self.num_params) * self.diff_boundary
+            next_params = mlu.rng.uniform(self.min_boundary, self.max_boundary)
         else:
             next_params = self.first_params
         while not self.end_event.is_set():
@@ -490,9 +489,12 @@
             if self.has_trust_region:
                 temp_min = np.maximum(self.min_boundary, self.best_params - self.trust_region)
                 temp_max = np.minimum(self.max_boundary, self.best_params + self.trust_region)
-                next_params = temp_min + nr.rand(self.num_params) * (temp_max - temp_min)
+                next_params = mlu.rng.uniform(temp_min, temp_max)
             else:
-                next_params = self.min_boundary + nr.rand(self.num_params) * self.diff_boundary
+                next_params = mlu.rng.uniform(
+                    self.min_boundary,
+                    self.max_boundary,
+                )

         self._shut_down()
         self.log.debug('Ended Random Learner')
@@ -562,7 +564,7 @@ def __init__(self,
             raise ValueError(msg)

         if initial_simplex_corner is None:
-            diff_roll = (self.diff_boundary - self.init_simplex_disp) * nr.rand(self.num_params)
+            diff_roll = (self.diff_boundary - self.init_simplex_disp) * mlu.rng.random(self.num_params)
             diff_roll[diff_roll==float('+inf')]= 0
             self.init_simplex_corner = self.min_boundary
             self.init_simplex_corner[self.init_simplex_corner==float('-inf')]=0
@@ -895,7 +897,7 @@ def generate_population(self):
                 curr_params = self.first_params
                 self.first_sample = False
             else:
-                curr_params = self.min_boundary + nr.rand(self.num_params) * self.diff_boundary
+                curr_params = mlu.rng.uniform(self.min_boundary, self.max_boundary)

             curr_cost = self.put_params_and_get_cost(curr_params)
@@ -907,9 +909,12 @@
             if self.has_trust_region:
                 temp_min = np.maximum(self.min_boundary,self.population[self.min_index] - self.trust_region)
                 temp_max = np.minimum(self.max_boundary,self.population[self.min_index] + self.trust_region)
-                curr_params = temp_min + nr.rand(self.num_params) * (temp_max - temp_min)
+                curr_params = mlu.rng.uniform(temp_min, temp_max)
             else:
-                curr_params = self.min_boundary + nr.rand(self.num_params) * self.diff_boundary
+                curr_params = mlu.rng.uniform(
+                    self.min_boundary,
+                    self.max_boundary,
+                )

             curr_cost = self.put_params_and_get_cost(curr_params)
@@ -932,7 +937,10 @@ def next_generation(self):
         Evolve the population by a single generation
         '''

-        self.curr_scale = nr.uniform(self.mutation_scale[0], self.mutation_scale[1])
+        self.curr_scale = mlu.rng.uniform(
+            self.mutation_scale[0],
+            self.mutation_scale[1],
+        )

         for index in range(self.num_population_members):
@@ -959,18 +967,18 @@ def mutate(self, index):
             index (int): Index of the point to be mutated.
         '''
-        fill_point = nr.randint(0, self.num_params)
+        fill_point = mlu.rng.integers(0, self.num_params)
         candidate_params = self.mutation_func(index)
-        crossovers = nr.rand(self.num_params) < self.cross_over_probability
+        crossovers = mlu.rng.random(self.num_params) < self.cross_over_probability
         crossovers[fill_point] = True
         mutated_params = np.where(crossovers, candidate_params, self.population[index])

         if self.has_trust_region:
             temp_min = np.maximum(self.min_boundary,self.population[self.min_index] - self.trust_region)
             temp_max = np.minimum(self.max_boundary,self.population[self.min_index] + self.trust_region)
-            rand_params = temp_min + nr.rand(self.num_params) * (temp_max - temp_min)
+            rand_params = mlu.rng.uniform(temp_min, temp_max)
         else:
-            rand_params = self.min_boundary + nr.rand(self.num_params) * self.diff_boundary
+            rand_params = mlu.rng.uniform(self.min_boundary, self.max_boundary)

         projected_params = np.where(np.logical_or(mutated_params < self.min_boundary, mutated_params > self.max_boundary), rand_params, mutated_params)
@@ -1527,7 +1535,9 @@ def update_search_params(self):
         self.search_params = []
         self.search_params.append(self.best_params)
         for _ in range(self.parameter_searches):
-            self.search_params.append(self.search_min + nr.uniform(size=self.num_params) * self.search_diff)
+            self.search_params.append(
+                mlu.rng.uniform(self.search_min, self.search_max),
+            )

     def _find_predicted_minimum(
         self,
@@ -2802,7 +2812,7 @@ def predict_cost(
             in scaled units if `perform_scaling` is `False`.
         '''
         if net_index is None:
-            net_index = nr.randint(self.num_nets)
+            net_index = mlu.rng.integers(self.num_nets)
         net = self.neural_net[net_index]
         cost = net.predict_cost(params, perform_scaling=perform_scaling)
         if perform_scaling:
@@ -2845,7 +2855,7 @@ def predict_cost_gradient(
             it will be in scaled units if `perform_scaling` is `False`.
         '''
         if net_index is None:
-            net_index = nr.randint(self.num_nets)
+            net_index = mlu.rng.integers(self.num_nets)
         net = self.neural_net[net_index]
         cost_gradient = net.predict_cost_gradient(
             params,
@@ -2937,7 +2947,7 @@ def find_next_parameters(self, net_index=None):
         '''
         # Set default values.
         if net_index is None:
-            net_index = nr.randint(self.num_nets)
+            net_index = mlu.rng.integers(self.num_nets)
         net = self.neural_net[net_index]

         # Create functions for the search.
@@ -3082,7 +3092,7 @@ def find_global_minima(self, net_index=None):

         # Set default values.
         if net_index is None:
-            net_index = nr.randint(self.num_nets)
+            net_index = mlu.rng.integers(self.num_nets)

         # Call self.find_next_parameters() since that method searches for the
         # predicted minimum.
diff --git a/mloop/neuralnet.py b/mloop/neuralnet.py
index 9701e2f..83c9cc1 100644
--- a/mloop/neuralnet.py
+++ b/mloop/neuralnet.py
@@ -7,7 +7,6 @@
 import mloop.utilities as mlu
 import numpy as np
-import numpy.random as nr
 import sklearn.preprocessing as skp
 import tensorflow as tf
diff --git a/mloop/testing.py b/mloop/testing.py
index e7fa269..6051e97 100644
--- a/mloop/testing.py
+++ b/mloop/testing.py
@@ -7,7 +7,6 @@
 import numpy as np
 import threading
 import mloop.utilities as mlu
-import numpy.random as nr
 import logging
 import os
 import time
@@ -51,8 +50,8 @@ def set_random_quadratic_landscape(self, min_region, max_region, random_scale=Tr
             min_scale (float): Natural log of minimum scale factor. Default 0.
             max_scale (float): Natural log of maximum scale factor. Default 3.
         '''
-        mini = min_region + nr.rand(self.num_params) * (max_region - min_region)
-        scal = np.exp(min_scale + nr.rand(self.num_params) * (max_scale - min_scale))
+        mini = mlu.rng.uniform(min_region, max_region)
+        scal = np.exp(mlu.rng.uniform(min_scale, max_scale))
         self.set_quadratic_landscape(minimum = mini,scale = scal)

     def set_quadratic_landscape(self, minimum = None, scale = None):
@@ -102,7 +101,7 @@ def set_noise_function(self, proportional=0.0, absolute=0.0):
         self.noise_prop = proportional
         self.noise_abs = absolute

-        self.noise_function = lambda p,c,u : (c *(1 + nr.normal()*self.noise_prop) + nr.normal()*self.noise_abs,np.sqrt((c*self.noise_prop)**2 + (self.noise_abs)**2))
+        self.noise_function = lambda p,c,u : (c *(1 + mlu.rng.normal()*self.noise_prop) + mlu.rng.normal()*self.noise_abs,np.sqrt((c*self.noise_prop)**2 + (self.noise_abs)**2))

     def set_bad_region(self, min_boundary, max_boundary, bad_cost=None, bad_uncer=None):
         '''
diff --git a/mloop/utilities.py b/mloop/utilities.py
index 5ec5ea1..4fb4239 100644
--- a/mloop/utilities.py
+++ b/mloop/utilities.py
@@ -12,7 +12,6 @@
 import sys
 import os
 import numpy as np
-import numpy.random as nr
 import base64
 import mloop
@@ -42,6 +41,12 @@
 #Set numpy to have no limit on printing to ensure all values are saved
 np.set_printoptions(threshold=np.inf)

+# Create a random number generator that can be used throughout M-LOOP. Users
+# could also seed this generator if they want to fix the random numbers
+# generated in M-LOOP (though this won't affect the generators used by M-LOOP
+# dependencies, such as tensorflow).
+rng = np.random.default_rng()
+
 def config_logger(**kwargs):
     '''
     Wrapper for _config_logger.
@@ -131,7 +136,7 @@ class defined in the datetime module. If set to None, then this
         date_string = datetime_to_string(file_datetime)
     filename_suffix = '_' + date_string
     if random_bytes:
-        random_string = base64.urlsafe_b64encode(nr.bytes(6)).decode()
+        random_string = base64.urlsafe_b64encode(rng.bytes(6)).decode()
         filename_suffix = filename_suffix + '_' + random_string
     filename_suffix = filename_suffix + '.' + file_type
     return filename_suffix
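Note (illustrative, not part of the patch): the shared generator added in mloop/utilities.py can be seeded to make M-LOOP's own random draws reproducible. The sketch below assumes the new mlu.rng attribute introduced by this change; the seed value and boundary arrays are made up for the example, and seeding mlu.rng does not seed TensorFlow or other dependencies.

    # Sketch only: one possible way to seed the shared M-LOOP generator.
    import numpy as np
    import mloop.utilities as mlu

    # Rebind the module-level generator to a seeded one (seed value is arbitrary).
    mlu.rng = np.random.default_rng(seed=42)

    # The learners above draw directly between array-valued boundaries, e.g.:
    min_boundary = np.array([-1.0, 0.0, 5.0])
    max_boundary = np.array([1.0, 2.0, 10.0])
    params = mlu.rng.uniform(min_boundary, max_boundary)  # one draw per parameter

Because the learners look the generator up through the module attribute (mlu.rng.uniform(...), mlu.rng.integers(...)), rebinding mloop.utilities.rng after import is picked up by all subsequent draws.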