Linking to DEM (#49)
* linking to YADE (two-particle collision)

* linking to YADE (triaxial compression)
chyalexcheng authored Jul 21, 2023
1 parent c25b10f commit 65da839
Showing 27 changed files with 2,034 additions and 98 deletions.
2 changes: 1 addition & 1 deletion docs/source/tutorials.rst
@@ -152,7 +152,7 @@ and `sim_data_file_ext` is correct such that GrainLearning can find the data in
"param_max": [1, 10],
"param_names": ['a', 'b'],
"num_samples": 20,
"obs_data_file": 'linearObs.dat',
"obs_data_file": 'linear_obs.dat',
"obs_names": ['f'],
"ctrl_name": 'u',
"sim_name": 'linear',
37 changes: 15 additions & 22 deletions grainlearning/dynamic_systems.py
@@ -463,7 +463,7 @@ def __init__(
inv_obs_weight: List[float] = None,
sim_data: np.ndarray = None,
callback: Callable = None,
param_data_file: str = None,
param_data_file: str = '',
param_data: np.ndarray = None,
param_names: List[str] = None,
):
@@ -539,7 +539,7 @@ def from_dict(cls: Type["IODynamicSystem"], obj: dict):
obs_data_file=obj["obs_data_file"],
obs_names=obj["obs_names"],
ctrl_name=obj["ctrl_name"],
param_data_file=obj.get("param_data_file", None),
param_data_file=obj.get("param_data_file", ''),
obs_data=obj.get("obs_data", None),
num_samples=obj.get("num_samples", None),
param_min=obj.get("param_min", None),
@@ -566,9 +566,7 @@ def get_obs_data(self):
self.num_steps = len(self.ctrl_data)
# remove the data not used by Bayesian filtering
self.num_obs = len(self.obs_names)
for key in keys_and_data:
if key not in self.obs_names:
keys_and_data.pop(key)
keys_and_data = {key: keys_and_data[key] for key in self.obs_names}
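A note on why the dictionary comprehension above replaces the pop-in-a-loop pattern: in Python 3, removing keys from a dict while iterating over it raises a RuntimeError. A self-contained illustration with made-up data:

keys_and_data = {'u': [0.1, 0.2], 'f': [1.0, 2.0], 'unused': [9.9, 9.9]}
obs_names = ['f']

# Old pattern: mutating the dict during iteration fails at runtime.
# for key in keys_and_data:
#     if key not in obs_names:
#         keys_and_data.pop(key)  # RuntimeError: dictionary changed size during iteration

# New pattern: build a filtered dict in one pass.
keys_and_data = {key: keys_and_data[key] for key in obs_names}
print(keys_and_data)  # {'f': [1.0, 2.0]}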
# assign the obs_data array
self.obs_data = np.zeros([self.num_obs, self.num_steps])
for i, key in enumerate(self.obs_names):
@@ -613,24 +611,17 @@ def load_sim_data(self):
for i, sim_data_file in enumerate(self.sim_data_files):
if self.sim_data_file_ext != '.npy':
data = get_keys_and_data(sim_data_file)
param_data = np.genfromtxt(sim_data_file.split('_sim')[0] + f'_param{self.sim_data_file_ext}')
for j, key in enumerate(self.param_names):
data[key] = param_data[j]
param_data = get_keys_and_data(sim_data_file.split('_sim')[0] + f'_param{self.sim_data_file_ext}')
for key in self.param_names:
data[key] = param_data[key][0]
else:
data = np.load(sim_data_file, allow_pickle=True).item()

for j, key in enumerate(self.obs_names):
self.sim_data[i, j, :] = data[key]

params = np.array([data[key] for key in self.param_names])
if not (np.abs((params - self.param_data[i, :])
/ self.param_data[i, :] < 1e-5).all()):
raise RuntimeError(
"Parameters [" + ", ".join(
[f"{v}" for v in self.param_data[i, :]])
+ '] vs [' +
", ".join(f"{v}" for v in params) +
f"] from the simulation data file {sim_data_file} and the parameter table do not match")
np.testing.assert_allclose(params, self.param_data[i, :], rtol=1e-5)
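For reference, the change above swaps the hand-rolled relative-tolerance comparison for NumPy's testing helper, which raises an AssertionError with a readable report on mismatch. A small sketch with invented numbers:

import numpy as np

params_from_file = np.array([2.0000001e6, 0.30000001])
params_from_table = np.array([2.0e6, 0.3])

# Passes silently: all elements agree within a relative tolerance of 1e-5.
np.testing.assert_allclose(params_from_file, params_from_table, rtol=1e-5)

# A genuine mismatch raises AssertionError listing the offending values,
# replacing the manually assembled error message of the old code.
try:
    np.testing.assert_allclose(np.array([2.1e6, 0.3]), params_from_table, rtol=1e-5)
except AssertionError as error:
    print(error)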

def load_param_data(self, curr_iter: int = 0):
"""
@@ -643,15 +634,17 @@ def load_param_data(self, curr_iter: int = 0):
self.param_data = np.genfromtxt(self.param_data_file, comments='!')[:, -self.num_params:]
self.num_samples = self.param_data.shape[0]
else:
# if param_data_file does not exist, get parameter data from simulation data files
files = glob(self.sim_data_dir + f'/iter{curr_iter}/{self.sim_name}*{self.sim_data_file_ext}')
# if param_data_file does not exist, get parameter data from text files
files = glob(self.sim_data_dir + f'/iter{curr_iter}/{self.sim_name}*_param*{self.sim_data_file_ext}')
self.num_samples = len(files)
self.sim_data_files = sorted(files)
self.param_data = np.zeros([self.num_samples, self.num_params])
for i, sim_data_file in enumerate(self.sim_data_files):
# TODO: this is still for npy, support text file formats
data = np.load(sim_data_file, allow_pickle=True).item()
params = [data[key] for key in self.param_names]
if self.sim_data_file_ext == '.npy':
data = np.load(sim_data_file, allow_pickle=True).item()
else:
data = get_keys_and_data(sim_data_file)
params = [data[key][0] for key in self.param_names]
self.param_data[i, :] = params
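The narrowed glob above matches only the parameter files rather than every simulation output in the iteration directory. A small sketch of the pattern (paths and file names are hypothetical):

from glob import glob

sim_data_dir, sim_name, ext, curr_iter = './sim_data', 'linear', '.txt', 0
files = sorted(glob(sim_data_dir + f'/iter{curr_iter}/{sim_name}*_param*{ext}'))
# e.g. ['./sim_data/iter0/linear_Iter0-Sample0_param.txt',
#       './sim_data/iter0/linear_Iter0-Sample1_param.txt', ...]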

def run(self, **kwargs):
@@ -692,7 +685,7 @@ def write_params_to_table(self, curr_iter: int):
:return param_data_file: The name of the parameter data file
"""
self.param_data_file = write_to_table(
f'{os.getcwd()}/{self.sim_name}',
self.sim_name,
self.param_data,
self.param_names,
curr_iter)
33 changes: 18 additions & 15 deletions grainlearning/tools.py
@@ -7,7 +7,6 @@
import subprocess
from typing import List, Callable
import numpy as np
import matplotlib.pylab as plt
from sklearn.mixture import BayesianGaussianMixture
from scipy.spatial import Voronoi, ConvexHull

@@ -60,14 +59,14 @@ def write_to_table(sim_name, table, names, curr_iter=0, threads=8):
"""

# Computation of decimal number for unique key
table_file_name = f'{sim_name}_Iter{curr_iter}_Samples.txt'
table_file_name = f'{os.getcwd()}/{sim_name}_Iter{curr_iter}_Samples.txt'

with open(table_file_name, 'w') as f_out:
num, dim = table.shape
mag = math.floor(math.log(num, 10)) + 1
f_out.write(' '.join(['!OMP_NUM_THREADS', 'description', 'key'] + names + ['\n']))
for j in range(num):
description = 'Iter' + str(curr_iter) + '-Sample' + str(j).zfill(mag)
description = f'{sim_name}_Iter' + str(curr_iter) + '-Sample' + str(j).zfill(mag)
f_out.write(' '.join(
[f'{threads:2d}'] + [description] +
[f'{j:9d}'] + [f'{table[j][i]:20.10e}' for i in range(dim)] + ['\n']))
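To make the table layout concrete, a short sketch of calling write_to_table and the kind of file it produces; the parameter names and values are invented, and the column spacing is approximate:

import numpy as np
from grainlearning.tools import write_to_table

table = np.array([[0.25, 3.0],
                  [0.75, 7.5]])  # two hypothetical samples of parameters 'a' and 'b'

# Writes <cwd>/linear_Iter0_Samples.txt and returns its path
# (as used by write_params_to_table in dynamic_systems.py above).
table_file = write_to_table('linear', table, ['a', 'b'], curr_iter=0)

# The file contains roughly:
# !OMP_NUM_THREADS description key a b
#  8 linear_Iter0-Sample0         0     2.5000000000e-01     3.0000000000e+00
#  8 linear_Iter0-Sample1         1     7.5000000000e-01     7.5000000000e+00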
@@ -84,14 +83,7 @@ def get_keys_and_data(file_name: str, delimiters=None):
"""
if delimiters is None:
delimiters = ['\t', ' ', ',']
data = np.genfromtxt(file_name)

try:
nc_ols = data.shape[1]
except IndexError:
n_rows = data.shape[0]
nc_ols = 1
data = data.reshape([n_rows, 1])
data = np.genfromtxt(file_name, ndmin=2)

with open(file_name, 'r') as f_open:
first_line = f_open.read().splitlines()[0]
@@ -102,7 +94,7 @@
keys.remove('#')
# remove empty strings from the list
keys = list(filter(None, keys))
if len(keys) == nc_ols:
if len(keys) == data.shape[1]:
break

# store data in a dictionary
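As a usage sketch, get_keys_and_data pairs the '#'-prefixed header keys with the data columns (note that the ndmin=2 argument of np.genfromtxt requires a reasonably recent NumPy). The file written below is made up for illustration:

from grainlearning.tools import get_keys_and_data

# Write a tiny two-column data file with a commented header.
with open('linear_demo.txt', 'w') as f:
    f.write('# u f\n')
    f.write('0.0 0.0\n')
    f.write('1.0 0.5\n')
    f.write('2.0 1.0\n')

keys_and_data = get_keys_and_data('linear_demo.txt')
print(keys_and_data['u'])  # e.g. [0. 1. 2.]
print(keys_and_data['f'])  # e.g. [0.  0.5 1. ]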
@@ -409,6 +401,7 @@ def plot_param_stats(fig_name, param_names, means, covs, save_fig=0):
:param covs: ndarray
:param save_fig: bool defaults to False
"""
import matplotlib.pylab as plt
num = len(param_names)
n_cols = int(np.ceil(num / 2))
plt.figure('Posterior means of the parameters')
@@ -449,6 +442,10 @@ def plot_posterior(fig_name, param_names, param_data, posterior, save_fig=0):
:param posterior: ndarray
:param save_fig: bool defaults to False
"""
try:
import matplotlib.pylab as plt
except ImportError:
print('matplotlib is not installed, cannot plot the posterior distribution. Please install with grainlearning[plot]')
raise
num_steps = posterior.shape[0]
for i, name in enumerate(param_names):
plt.figure(f'Posterior distribution of {name}')
@@ -468,6 +465,7 @@


def plot_param_data(fig_name, param_names, param_data_list, save_fig=0):
import matplotlib.pylab as plt
num = len(param_names)
n_cols = int(np.ceil(num / 2))
num = num - 1
@@ -501,6 +499,7 @@ def plot_obs_and_sim(fig_name, ctrl_name, obs_names, ctrl_data, obs_data, sim_da
:param posterior: ndarray
:param save_fig: bool defaults to False
"""
import matplotlib.pylab as plt
ensemble_mean = np.einsum('ijk, ki->jk', sim_data, posteriors)
ensemble_std = np.einsum('ijk, ki->jk', (sim_data - ensemble_mean) ** 2, posteriors)
ensemble_std = np.sqrt(ensemble_std)
@@ -549,6 +548,10 @@ def write_dict_to_file(data, file_name):
with open(file_name, 'w') as f:
keys = data.keys()
f.write('# ' + ' '.join(keys) + '\n')
num = len(data[list(keys)[0]])
for i in range(num):
f.write(' '.join([str(data[key][i]) for key in keys]) + '\n')
# check if data[list(keys)[0]] is a list
if isinstance(data[list(keys)[0]], list):
num = len(data[list(keys)[0]])
for i in range(num):
f.write(' '.join([str(data[key][i]) for key in keys]) + '\n')
else:
f.write(' '.join([str(data[key]) for key in keys]) + '\n')
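A quick illustration of the two branches write_dict_to_file now handles, with made-up data:

from grainlearning.tools import write_dict_to_file

# List-valued entries produce one row per element ...
write_dict_to_file({'a': [0.1, 0.2], 'b': [1.0, 2.0]}, 'list_demo.txt')
# list_demo.txt:
# # a b
# 0.1 1.0
# 0.2 2.0

# ... while scalar-valued entries produce a single row.
write_dict_to_file({'a': 0.1, 'b': 1.0}, 'scalar_demo.txt')
# scalar_demo.txt:
# # a b
# 0.1 1.0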
8 changes: 7 additions & 1 deletion pyproject.toml
@@ -28,10 +28,16 @@ pytest = {version = "^6.2.4", optional = true}
pytest-cov = {version = "^2.12.1", optional = true}
prospector = {version = "^1.7.6", optional = true, extras = ["with_pyroma"]}
pyroma = {version = "^4.0", optional = true}
h5py = {version ="^3.7.0", optional = true}
wandb = {version ="^0.13.4", optional = true}
tensorflow = {version ="2.10.0", optional = true}
ipykernel = {version = "*", optional = true}

[tool.poetry.extras]
docs = ["Sphinx", "sphinx-autodoc-typehints", "sphinx-mdinclude", "sphinx-rtd-theme"]
dev = ["pytest", "pytest-cov", "prospector", "pyroma"]
dev = ["pytest", "pytest-cov", "prospector", "pyroma", "h5py"]
rnn = ["wandb", "tensorflow"]
tutorials = ["ipykernel"]

[build-system]
requires = ["poetry-core>=1.0.0"]
File renamed without changes.
2 changes: 1 addition & 1 deletion tests/integration/test_gmm.py
@@ -18,7 +18,7 @@ def test_gmm():
"num_iter": 0,
"system": {
"system_type": IODynamicSystem,
"obs_data_file": f'{sim_data_dir}/linearObs.dat',
"obs_data_file": f'{sim_data_dir}/linear_obs.dat',
"obs_names": ['f'],
"ctrl_name": 'u',
"sim_name": 'linear',
4 changes: 2 additions & 2 deletions tests/integration/test_lenreg_IO.py
@@ -14,7 +14,7 @@

def run_sim(model, **kwargs):
"""
Runs the external executable and passes the parameter sample to generate the output file.
Run the external executable and pass the parameter sample to generate the output file.
"""
# keep the naming convention consistent between iterations
mag = floor(log(model.num_samples, 10)) + 1
@@ -40,7 +40,7 @@ def test_lenreg_IO():
"param_names": ['a', 'b'],
"num_samples": 20,
"obs_data_file": os.path.abspath(
os.path.join(__file__, "../..")) + '/data/linear_sim_data/linearObs.dat',
os.path.join(__file__, "../..")) + '/data/linear_sim_data/linear_obs.dat',
"obs_names": ['f'],
"ctrl_name": 'u',
"sim_name": 'linear',
2 changes: 1 addition & 1 deletion tests/integration/test_smc.py
@@ -15,7 +15,7 @@ def test_smc():
"num_iter": 0,
"system": {
"system_type": IODynamicSystem,
"obs_data_file": f'{sim_data_dir}/linearObs.dat',
"obs_data_file": f'{sim_data_dir}/linear_obs.dat',
"obs_names": ['f'],
"ctrl_name": 'u',
"sim_name": 'linear',
45 changes: 16 additions & 29 deletions tests/unit/test_dynamic_systems.py
@@ -193,26 +193,20 @@ class TestIODynamicSystem:

def test_init(self):
"""Test if the class is initialized correctly"""
system_cls = IODynamicSystem(
param_min=[0.001, 0.001],
param_max=[1, 10],
param_names=['a', 'b'],
num_samples=20,
obs_data_file=path.abspath(path.join(__file__, "../..")) + '/data/linear_sim_data/linearObs.dat',
obs_names=['f'],
ctrl_name='u',
sim_name='linear',
sim_data_dir=path.abspath(path.join(__file__, "../..")) + '/data/linear_sim_data/',
sim_data_file_ext='.txt',
)
system_cls = IODynamicSystem(sim_name='linear',
sim_data_dir=path.abspath(path.join(__file__, "../..")) + '/data/linear_sim_data/',
sim_data_file_ext='.txt', obs_data_file=path.abspath(
path.join(__file__, "../..")) + '/data/linear_sim_data/linear_obs.dat', obs_names=['f'], ctrl_name='u',
num_samples=20, param_min=[0.001, 0.001], param_max=[1, 10],
param_names=['a', 'b'])

config = {
"system_type": IODynamicSystem,
"param_min": [0.001, 0.001],
"param_max": [1, 10],
"param_names": ['a', 'b'],
"num_samples": 20,
"obs_data_file": path.abspath(path.join(__file__, "../..")) + '/data/linear_sim_data/linearObs.dat',
"obs_data_file": path.abspath(path.join(__file__, "../..")) + '/data/linear_sim_data/linear_obs.dat',
"obs_names": ['f'],
"ctrl_name": 'u',
"sim_name": 'linear',
@@ -246,27 +240,20 @@ def run_sim(system, **kwargs):
data.append(np.array(y, ndmin=2))
# Write the data to a file
data_file_name = f'{sim_name}_' + description + '_sim.txt'
write_dict_to_file({'f': y}, data_file_name)
write_dict_to_file({'f': list(y)}, data_file_name)
# Write the parameters to a file
data_param_name = f'{sim_name}_' + description + '_param.txt'
param_data = {'param0': [param[0]], 'param1': [param[1]], 'param2': [param[2]], 'param3': [param[3]]}
param_data = {'a': [param[0]], 'b': [param[1]], 'c': [param[2]], 'd': [param[3]]}
write_dict_to_file(param_data, data_param_name)
# Set the simulation data
system.set_sim_data(data)

system_cls = IODynamicSystem(
param_min=[None, None, None, None],
param_max=[None, None, None, None],
param_names=['a', 'b', 'c', 'd'],
num_samples=10,
obs_data_file=path.abspath(path.join(__file__, "../..")) + '/data/linear_sim_data/linearObs.dat',
obs_names=['f'],
ctrl_name='u',
sim_name='test',
sim_data_dir=PATH + '/sim_data/',
sim_data_file_ext='.txt',
callback=run_sim,
)
system_cls = IODynamicSystem(sim_name='test', sim_data_dir=PATH + '/sim_data/', sim_data_file_ext='.txt',
obs_data_file=path.abspath(
path.join(__file__, "../..")) + '/data/linear_sim_data/linear_obs.dat',
obs_names=['f'], ctrl_name='u', num_samples=10, param_min=[None, None, None, None],
param_max=[None, None, None, None], callback=run_sim,
param_names=['a', 'b', 'c', 'd'])

system_cls.param_data = np.arange(1, system_cls.num_samples * 4 + 1, dtype=float).reshape(
system_cls.num_samples, 4)
@@ -301,7 +288,7 @@ def test_get_obs_data(self):
"system_type": IODynamicSystem,
"param_min": [0.001, 0.001],
"param_max": [1, 10],
"obs_data_file": path.abspath(path.join(__file__, "../../data/linear_sim_data/linearObs.dat")),
"obs_data_file": path.abspath(path.join(__file__, "../../data/linear_sim_data/linear_obs.dat")),
"obs_names": ['f'],
"ctrl_name": 'u',
"sim_name": 'linear',
20 changes: 6 additions & 14 deletions tests/unit/test_iterative_bayesian_filter.py
@@ -171,20 +171,12 @@ def test_run_inference():
def test_save_and_load_proposal():
"""Test if the proposal density can be loaded from a file"""
#: Initialize a system object (note the observed data is not used in this test)
system_cls = IODynamicSystem(
sim_name='test_ibf',
sim_data_dir=PATH + '/sim_data/',
sim_data_file_ext='.txt',
obs_names=['f'],
ctrl_name='u',
num_samples=10,
param_min=[1e6, 0.2],
param_max=[1e7, 0.5],
obs_data=[[12, 3, 4, 4], [12, 4, 5, 4]],
ctrl_data=[1, 2, 3, 4],
param_names=['a', 'b'],
obs_data_file=os.path.abspath(os.path.join(__file__, "../..")) + '/data/linear_sim_data/linearObs.dat',
)
system_cls = IODynamicSystem(sim_name='test_ibf', sim_data_dir=PATH + '/sim_data/', sim_data_file_ext='.txt',
obs_data_file=os.path.abspath(
os.path.join(__file__, "../..")) + '/data/linear_sim_data/linear_obs.dat',
obs_names=['f'], ctrl_name='u', num_samples=10, param_min=[1e6, 0.2],
param_max=[1e7, 0.5], obs_data=[[12, 3, 4, 4], [12, 4, 5, 4]], ctrl_data=[1, 2, 3, 4],
param_names=['a', 'b'])

#: Assert that the inference runs correctly if a proposal density is provided
ibf_cls = IterativeBayesianFilter.from_dict(
File renamed without changes.
File renamed without changes.
File renamed without changes.