scale_cov_with_max defaults to False
chyalexcheng committed Dec 9, 2023
1 parent 23cd753 commit 6edc2a9
Showing 4 changed files with 14 additions and 11 deletions.
5 changes: 4 additions & 1 deletion grainlearning/bayesian_calibration.py
@@ -184,7 +184,10 @@ def load_and_process(self, sigma: float = 0.1):
     def load_all(self):
         """Simply load all previous iterations of Bayesian calibration
         """
-        for _ in range(self.num_iter - 1):
         self.load_system()
         self.calibration.add_curr_param_data_to_list(self.system.param_data)
         self.increase_curr_iter()
+        while self.curr_iter < self.num_iter:
+            print(f"Bayesian calibration iter No. {self.curr_iter}")
+            self.load_system()
+            self.calibration.add_curr_param_data_to_list(self.system.param_data)
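The net effect is that load_all now advances the iteration counter in a while loop and reports progress as it goes. Below is a minimal, self-contained sketch of that loop pattern, using a toy Resumer class as a stand-in for BayesianCalibration; the class and the body of load_system are illustrative, not the library's exact code.

class Resumer:
    """Toy stand-in mimicking the resume-all-iterations loop in load_all."""

    def __init__(self, num_iter):
        self.num_iter = num_iter
        self.curr_iter = 0

    def load_system(self):
        # placeholder for reading one stored iteration from disk
        print(f"  loading data for iteration {self.curr_iter}")

    def load_all(self):
        while self.curr_iter < self.num_iter:
            print(f"Bayesian calibration iter No. {self.curr_iter}")
            self.load_system()
            self.curr_iter += 1  # plays the role of increase_curr_iter()


Resumer(num_iter=3).load_all()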
10 changes: 5 additions & 5 deletions grainlearning/inference.py
@@ -21,7 +21,7 @@ class SMC:
         system_cls = SMC.from_dict(
             {
                 "ess_target": 0.3,
-                "scale_cov_with_max": True
+                "scale_cov_with_max": False
             }
         )
@@ -34,7 +34,7 @@ class SMC:
         system_cls = SMC(
             ess_target = 0.3,
-            scale_cov_with_max = True
+            scale_cov_with_max = False
         )

     :param ess_target: Target effective sample size (w_0 / sum_i w_i^2)
@@ -49,7 +49,7 @@ class SMC:
     ess_target: float

     #: True if the covariance matrix is scaled with the maximum of the observations, defaults to True
-    scale_cov_with_max: bool = True
+    scale_cov_with_max: bool = False

     #: Covariance matrices of shape (num_steps, num_obs, num_obs)
     cov_matrices: np.array
@@ -66,7 +66,7 @@ class SMC:
     def __init__(
         self,
         ess_target: float,
-        scale_cov_with_max: bool = True,
+        scale_cov_with_max: bool = False,
         cov_matrices: np.array = None,
     ):
         """Initialize the SMC class"""
@@ -85,7 +85,7 @@ def from_dict(cls: Type["SMC"], obj: dict):
         """
         return cls(
             ess_target=obj["ess_target"],
-            scale_cov_with_max=obj.get("scale_cov_with_max", True),
+            scale_cov_with_max=obj.get("scale_cov_with_max", False),
             cov_matrices=obj.get("cov_matrices", None),
         )
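With the default flipped, constructing an SMC without mentioning scale_cov_with_max now gives the unscaled behaviour. A minimal usage sketch, assuming grainlearning is installed, the import path matches the file shown above, and the keyword ends up stored as the attribute of the same name declared in the class body:

from grainlearning.inference import SMC

# Omitting the key now falls back to False ...
smc_default = SMC.from_dict({"ess_target": 0.3})
assert smc_default.scale_cov_with_max is False

# ... so the pre-commit behaviour has to be requested explicitly.
smc_scaled = SMC.from_dict({"ess_target": 0.3, "scale_cov_with_max": True})
assert smc_scaled.scale_cov_with_max is True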

8 changes: 4 additions & 4 deletions grainlearning/sampling.py
@@ -182,11 +182,11 @@ def from_dict(cls: Type["GaussianMixtureModel"], obj: dict):
             max_num_components=obj["max_num_components"],
             weight_concentration_prior=obj.get("weight_concentration_prior", None),
             covariance_type=obj.get("covariance_type", "tied"),
-            n_init=obj.get("n_init", 1),
+            n_init=obj.get("n_init", 10),
             tol=obj.get("tol", 1.0e-5),
             max_iter=obj.get("max_iter", 100),
             random_state=obj.get("random_state", None),
-            init_params=obj.get("init_params", "kmeans"),
+            init_params=obj.get("init_params", "k-means++"),
             warm_start=obj.get("warm_start", False),
             expand_factor=obj.get("expand_factor", 10),
             slice_sampling=obj.get("slice_sampling", False),
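The two new fall-backs only take effect when the corresponding keys are absent from the configuration dict. A hedged sketch (the value 5 for max_num_components is arbitrary and purely illustrative):

from grainlearning.sampling import GaussianMixtureModel

# Rely on the new defaults (n_init=10, init_params="k-means++") ...
gmm = GaussianMixtureModel.from_dict({"max_num_components": 5})

# ... or pin the old values explicitly when reproducing pre-commit runs.
gmm_old_defaults = GaussianMixtureModel.from_dict({
    "max_num_components": 5,
    "n_init": 1,
    "init_params": "kmeans",
})

These options appear to mirror scikit-learn's mixture-model parameters: n_init sets how many initialisations are attempted, and "k-means++" seeds them more carefully than plain "kmeans".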
@@ -258,7 +258,7 @@ def regenerate_params(self, weight: np.ndarray, system: Type["DynamicSystem"]):
         # resample until all parameters are within the upper and lower bounds
         test_num = system.num_samples
         while system.param_min and system.param_max and new_params.shape[0] < minimum_num_samples:
-            test_num = int(np.ceil(1.1 * test_num))
+            test_num = int(np.ceil(1.01 * test_num))
             new_params = self.draw_samples_within_bounds(system, test_num)

         return new_params
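Shrinking the inflation factor from 1.1 to 1.01 makes the resampling loop grow the candidate pool in much finer steps, at the cost of more passes through the while loop. A small standalone illustration (the start and target counts are made up):

import numpy as np

def rounds_to_reach(target, start, factor):
    """Count how many inflation rounds test_num needs before it reaches target."""
    test_num, rounds = start, 0
    while test_num < target:
        test_num = int(np.ceil(factor * test_num))
        rounds += 1
    return rounds

# Growing a pool of 100 candidates until at least 200 are requested:
print(rounds_to_reach(200, 100, 1.1))   # +10% per retry: a handful of rounds
print(rounds_to_reach(200, 100, 1.01))  # +1% per retry: many more, finer rounds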
@@ -281,7 +281,7 @@ def draw_samples_within_bounds(self, system: Type["DynamicSystem"], num: int = 1

         scores = self.gmm.score_samples(self.expanded_normalized_params)
         new_params = new_params[np.where(
-            self.gmm.score_samples(new_params) > scores.mean() - 2 * scores.std())]
+            self.gmm.score_samples(new_params) > scores.mean())]

         new_params *= self.max_params
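The acceptance rule for freshly drawn samples is also tighter: candidates are now kept only if their GMM log-likelihood exceeds the mean score of the expanded, normalized training parameters, rather than the mean minus two standard deviations. A synthetic numpy sketch comparing the two thresholds (the score distributions below are invented for illustration):

import numpy as np

rng = np.random.default_rng(0)
train_scores = rng.normal(loc=-3.0, scale=1.0, size=1000)      # stand-in for gmm.score_samples(training params)
candidate_scores = rng.normal(loc=-3.5, scale=1.2, size=1000)  # stand-in for gmm.score_samples(new_params)

old_keep = candidate_scores > train_scores.mean() - 2 * train_scores.std()
new_keep = candidate_scores > train_scores.mean()

# Fraction of candidates surviving each rule; the new rule is more selective.
print(old_keep.mean(), new_keep.mean())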

2 changes: 1 addition & 1 deletion grainlearning/tools.py
@@ -574,7 +574,7 @@ def close_plots(save_fig=0):
     import matplotlib.pylab as plt
     if not save_fig:
         plt.show()
-    plt.close()
+    plt.close('all')


 def write_dict_to_file(data, file_name):
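The change in close_plots is a standard matplotlib distinction: plt.close() closes only the current figure, while plt.close('all') closes every open figure, which matters when several figures were created before the call. A quick headless sketch of the difference:

import matplotlib
matplotlib.use("Agg")  # non-interactive backend so the sketch runs anywhere
import matplotlib.pyplot as plt

plt.figure(); plt.figure(); plt.figure()
print(len(plt.get_fignums()))  # 3 figures open

plt.close()                    # closes only the current (most recent) figure
print(len(plt.get_fignums()))  # 2

plt.close('all')               # closes everything, as close_plots now does
print(len(plt.get_fignums()))  # 0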
