diff --git a/.pylintrc b/.pylintrc
index a1ccb0e..616c20a 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -82,7 +82,7 @@ persistent=yes
# Minimum Python version to use for version dependent checks. Will default to
# the version used to run pylint.
-py-version=3.7
+py-version=3.8
# Discover python modules and packages in the file system subtree.
recursive=no
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 895d99f..e2a2939 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -9,10 +9,13 @@ If you would like to implement a new feature or a bug, please make sure you (or
### Creating a Pull Request
1. [Fork](https://github.com/cnellington/Contextualized/fork) this repository.
-2. Make your code changes locally.
-3. Check the style using pylint and black following [these steps](https://github.com/cnellington/Contextualized/pull/111#issue-1323230194).
-4. (Optional) Include your name in alphabetical order in [ACKNOWLEDGEMENTS.md](https://github.com/cnellington/Contextualized/blob/main/ACKNOWLEDGEMENTS.md).
-5. Issue a PR to merge your changes into the `dev` branch.
+2. Install locally with `pip install -e .`.
+3. Install extra developer dependencies with `pip install -r dev_requirements.txt`.
+4. Make your code changes locally.
+5. Automatically format your code and check for style issues by running `format_style.sh` (see the sketch after this list). We are working on linting the entire repo, but please make sure your code is cleared by pylint.
+6. Automatically update our documentation by running `update_docs.sh`.
+7. (Optional) Include your name in alphabetical order in [ACKNOWLEDGEMENTS.md](https://github.com/cnellington/Contextualized/blob/main/ACKNOWLEDGEMENTS.md).
+8. Issue a PR to merge your changes into the `main` branch.
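+
+A minimal sketch of the workflow above (assuming the helper scripts are run from the repository root; exact invocation may differ on your system):
+
+```bash
+pip install -e .                      # step 2: editable install
+pip install -r dev_requirements.txt   # step 3: developer dependencies
+# ...make your changes...
+bash format_style.sh                  # step 5: format and lint
+bash update_docs.sh                   # step 6: regenerate the docs
+```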
## Issues
diff --git a/README.md b/README.md
index c38021c..98e7787 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-![Preview](contextualized_logo.png)
+![Preview](docs/logo.png)
#
![License](https://img.shields.io/github/license/cnellington/contextualized.svg?style=flat-square)
@@ -10,7 +10,7 @@
-A statistical machine learning toolbox for estimating models, distributions, and functions with context-specific parameters.
+An easy-to-use machine learning toolbox for estimating models, distributions, and functions with context-specific parameters.
Context-specific parameters:
- Find hidden heterogeneity in data -- are all samples the same?
@@ -66,13 +66,16 @@ Feel free to add your own page(s) by sending a PR or request an improvement by c
-ContextualizedML was originally implemented by [Caleb Ellington](https://calebellington.com/) (CMU) and [Ben Lengerich](http://web.mit.edu/~blengeri/www) (MIT).
+Contextualized ML was originally implemented by [Caleb Ellington](https://calebellington.com/) (CMU) and [Ben Lengerich](http://web.mit.edu/~blengeri/www) (MIT).
Many people have helped. Check out [ACKNOWLEDGEMENTS.md](https://github.com/cnellington/Contextualized/blob/main/ACKNOWLEDGEMENTS.md)!
## Related Publications and Pre-prints
+- [Contextualized Machine Learning](https://arxiv.org/abs/2310.11340)
+- [Contextualized Networks Reveal Heterogeneous Transcriptomic Regulation in Tumors at Sample-Specific Resolution](https://www.biorxiv.org/content/10.1101/2023.12.01.569658v1)
+- [Contextualized Policy Recovery: Modeling and Interpreting Medical Decisions with Adaptive Imitation Learning](https://arxiv.org/abs/2310.07918)
- [Automated Interpretable Discovery of Heterogeneous Treatment Effectiveness: A COVID-19 Case Study](https://www.sciencedirect.com/science/article/pii/S1532046422001022)
- [NOTMAD: Estimating Bayesian Networks with Sample-Specific Structures and Parameters](http://arxiv.org/abs/2111.01104)
- [Discriminative Subtyping of Lung Cancers from Histopathology Images via Contextual Deep Learning](https://www.medrxiv.org/content/10.1101/2020.06.25.20140053v1.abstract)
diff --git a/contextualized/analysis/__init__.py b/contextualized/analysis/__init__.py
index b08ac8e..0812ce8 100644
--- a/contextualized/analysis/__init__.py
+++ b/contextualized/analysis/__init__.py
@@ -12,3 +12,8 @@
plot_homogeneous_predictor_effects,
plot_heterogeneous_predictor_effects,
)
+from contextualized.analysis.pvals import (
+ calc_homogeneous_context_effects_pvals,
+ calc_homogeneous_predictor_effects_pvals,
+ calc_heterogeneous_predictor_effects_pvals,
+)
diff --git a/contextualized/analysis/accuracy_split.py b/contextualized/analysis/accuracy_split.py
index 06e7cdd..8bdb5fc 100644
--- a/contextualized/analysis/accuracy_split.py
+++ b/contextualized/analysis/accuracy_split.py
@@ -1,8 +1,10 @@
"""
Utilities for post-hoc analysis of trained Contextualized models.
"""
+from typing import *
import numpy as np
+import pandas as pd
from sklearn.metrics import roc_auc_score as roc
@@ -14,11 +16,25 @@ def get_roc(Y_true, Y_pred):
return np.nan
-def print_acc_by_covars(Y_true, Y_pred, covar_df, **kwargs):
+def print_acc_by_covars(
+ Y_true: np.ndarray, Y_pred: np.ndarray, covar_df: pd.DataFrame, **kwargs
+) -> None:
"""
Prints Accuracy for different data splits with covariates.
- Assume Y_true and Y_pred are np arrays.
- Allows train_idx and test_idx as Boolean locators.
+
+ Args:
+ Y_true (np.ndarray): True labels.
+ Y_pred (np.ndarray): Predicted labels.
+ covar_df (pd.DataFrame): DataFrame of covariates.
+ max_classes (int, optional): Maximum number of classes to print. Defaults to 20.
+ covar_stds (np.ndarray, optional): Standard deviations of covariates. Defaults to None.
+ covar_means (np.ndarray, optional): Means of covariates. Defaults to None.
+ covar_encoders (List[LabelEncoder], optional): Encoders for covariates. Defaults to None.
+ train_idx (np.ndarray, optional): Boolean array indicating training data. Defaults to None.
+ test_idx (np.ndarray, optional): Boolean array indicating testing data. Defaults to None.
+
+ Returns:
+ None
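+
+    Example (illustrative sketch; assumes a fitted ``contextualized.easy`` classifier ``model``
+    and Boolean index arrays ``train_idx`` and ``test_idx``)::
+
+        Y_pred = model.predict(C, X)
+        print_acc_by_covars(Y_true, Y_pred, covar_df, train_idx=train_idx, test_idx=test_idx)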
"""
Y_true = np.squeeze(Y_true)
Y_pred = np.squeeze(Y_pred)
diff --git a/contextualized/analysis/bootstraps.py b/contextualized/analysis/bootstraps.py
index 6204fc9..a9e2eba 100644
--- a/contextualized/analysis/bootstraps.py
+++ b/contextualized/analysis/bootstraps.py
@@ -1,5 +1,6 @@
# Utility functions for bootstraps
+
def select_good_bootstraps(sklearn_wrapper, train_errs, tol=2, **kwargs):
"""
Select bootstraps that are good for a given model.
@@ -19,5 +20,6 @@ def select_good_bootstraps(sklearn_wrapper, train_errs, tol=2, **kwargs):
train_errs_by_bootstrap = np.mean(train_errs, axis=(1, 2))
sklearn_wrapper.models = sklearn_wrapper.models[
- train_errs_by_bootstrap < tol*np.min(train_errs_by_bootstrap)]
+ train_errs_by_bootstrap < tol * np.min(train_errs_by_bootstrap)
+ ]
return sklearn_wrapper
diff --git a/contextualized/analysis/effects.py b/contextualized/analysis/effects.py
index 6c15769..0787b6e 100644
--- a/contextualized/analysis/effects.py
+++ b/contextualized/analysis/effects.py
@@ -1,21 +1,29 @@
"""
Utilities for plotting effects learned by Contextualized models.
"""
-
+from typing import *
import numpy as np
import matplotlib.pyplot as plt
+from contextualized.easy.wrappers import SKLearnWrapper
+
def simple_plot(
- x_vals,
- y_vals,
+ x_vals: List[Union[float, int]],
+ y_vals: List[Union[float, int]],
**kwargs,
-):
+) -> None:
"""
- Simple plotting of xs and ys with kwargs passed to mpl helpers.
- :param x_vals:
- :param y_vals:
+ Simple plotting of y vs x with kwargs passed to matplotlib helpers.
+
+ Args:
+ x_vals: x values to plot
+ y_vals: y values to plot
+ **kwargs: kwargs passed to matplotlib helpers (fill_alpha, fill_color, y_lowers, y_uppers, x_label, y_label, x_ticks, x_ticklabels, y_ticks, y_ticklabels)
+
+ Returns:
+ None
"""
plt.figure(figsize=kwargs.get("figsize", (8, 8)))
if "y_lowers" in kwargs and "y_uppers" in kwargs:
@@ -84,16 +92,25 @@ def plot_effect(x_vals, y_means, y_lowers=None, y_uppers=None, **kwargs):
)
-def get_homogeneous_context_effects(model, C, **kwargs):
+def get_homogeneous_context_effects(
+ model: SKLearnWrapper, C: np.ndarray, **kwargs
+) -> Tuple[np.ndarray, np.ndarray]:
"""
Get the homogeneous (context-invariant) effects of context.
- :param model:
- :param C:
- returns:
- c_vis: the context values that were used to estimate the effects
- effects: np array of effects, one for each context. Each homogeneous effect is a matrix of shape:
- (n_bootstraps, n_context_vals, n_outcomes).
+ Args:
+ model (SKLearnWrapper): a fitted ``contextualized.easy`` model
+ C: the context values to use to estimate the effects
+        verbose (bool, optional): print progress. Default True.
+        individual_preds (bool, optional): whether to plot each bootstrap. Default True.
+ C_vis (np.ndarray, optional): Context bins used to visualize context (n_vis, n_contexts). Default None to construct anew.
+ n_vis (int, optional): Number of bins to use to visualize context. Default 1000.
+
+ Returns:
+ Tuple[np.ndarray, np.ndarray]:
+ c_vis: the context values that were used to estimate the effects
+ effects: array of effects, one for each context. Each homogeneous effect is a matrix of shape:
+ (n_bootstraps, n_context_vals, n_outcomes).
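+
+    Example (illustrative sketch; ``model`` is assumed to be a fitted ``contextualized.easy`` model)::
+
+        c_vis, effects = get_homogeneous_context_effects(model, C, n_vis=100)
+        # effects[i].shape == (n_bootstraps, n_context_vals, n_outcomes)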
"""
if kwargs.get("verbose", True):
print("Estimating Homogeneous Contextual Effects.")
@@ -233,14 +250,32 @@ def plot_boolean_vars(names, y_mean, y_err, **kwargs):
def plot_homogeneous_context_effects(
- model,
- C,
+ model: SKLearnWrapper,
+ C: np.ndarray,
**kwargs,
-):
+) -> None:
"""
- Plot the homogeneous (context-invariant) effects of context.
- :param model:
- :param C:
+ Plot the direct effect of context on outcomes, disregarding other features.
+ This context effect is homogeneous in that it is a static function of context (context-invariant).
+
+ Args:
+ model (SKLearnWrapper): a fitted ``contextualized.easy`` model
+ C: the context values to use to estimate the effects
+        verbose (bool, optional): print progress. Default True.
+        individual_preds (bool, optional): whether to plot each bootstrap. Default True.
+ C_vis (np.ndarray, optional): Context bins used to visualize context (n_vis, n_contexts). Default None to construct anew.
+ n_vis (int, optional): Number of bins to use to visualize context. Default 1000.
+ lower_pct (int, optional): Lower percentile for bootstraps. Default 2.5.
+ upper_pct (int, optional): Upper percentile for bootstraps. Default 97.5.
+ classification (bool, optional): Whether to exponentiate the effects. Default True.
+ C_encoders (List[sklearn.preprocessing.LabelEncoder], optional): encoders for each context. Default None.
+ C_means (np.ndarray, optional): means for each context. Default None.
+ C_stds (np.ndarray, optional): standard deviations for each context. Default None.
+ xlabel_prefix (str, optional): prefix for x label. Default "".
+ figname (str, optional): name of figure to save. Default None.
+
+ Returns:
+ None
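+
+    Example (illustrative sketch; ``model`` is assumed to be a fitted regressor, so effects are not exponentiated)::
+
+        plot_homogeneous_context_effects(model, C, classification=False, n_vis=100)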
"""
c_vis, effects = get_homogeneous_context_effects(model, C, **kwargs)
# effects.shape is (n_context, n_bootstraps, n_context_vals, n_outcomes)
@@ -283,16 +318,34 @@ def plot_homogeneous_context_effects(
def plot_homogeneous_predictor_effects(
- model,
- C,
- X,
+ model: SKLearnWrapper,
+ C: np.ndarray,
+ X: np.ndarray,
**kwargs,
-):
+) -> None:
"""
- Plot the homogeneous (context-invariant) effects of predictors.
- :param model:
- :param C:
- :param X:
+ Plot the effect of predictors on outcomes that do not change with context (homogeneous).
+
+ Args:
+ model (SKLearnWrapper): a fitted ``contextualized.easy`` model
+ C: the context values to use to estimate the effects
+ X: the predictor values to use to estimate the effects
+ max_classes_for_discrete (int, optional): maximum number of classes to treat as discrete. Default 10.
+ min_effect_size (float, optional): minimum effect size to plot. Default 0.003.
+ ylabel (str, optional): y label for plot. Default "Influence of ".
+ xlabel_prefix (str, optional): prefix for x label. Default "".
+ X_names (List[str], optional): names of predictors. Default None.
+ X_encoders (List[sklearn.preprocessing.LabelEncoder], optional): encoders for each predictor. Default None.
+ X_means (np.ndarray, optional): means for each predictor. Default None.
+ X_stds (np.ndarray, optional): standard deviations for each predictor. Default None.
+        verbose (bool, optional): print progress. Default True.
+ lower_pct (int, optional): Lower percentile for bootstraps. Default 2.5.
+ upper_pct (int, optional): Upper percentile for bootstraps. Default 97.5.
+ classification (bool, optional): Whether to exponentiate the effects. Default True.
+ figname (str, optional): name of figure to save. Default None.
+
+ Returns:
+ None
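+
+    Example (illustrative sketch; ``C`` and ``X`` are assumed to be DataFrames matching the data used to fit ``model``)::
+
+        plot_homogeneous_predictor_effects(model, C, X, classification=False)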
"""
c_vis = np.zeros_like(C.values)
x_vis = make_grid_mat(X.values, 1000)
@@ -355,19 +408,31 @@ def plot_homogeneous_predictor_effects(
def plot_heterogeneous_predictor_effects(model, C, X, **kwargs):
"""
- Plot the heterogeneous (context-dependent) effects of context.
- :param model:
- :param C:
- :param X:
- :param encoders:
- :param C_means:
- :param C_stds:
- :param X_names:
- :param ylabel: (Default value = "Influence of ")
- :param min_effect_size: (Default value = 0.003)
- :param n_vis: (Default value = 1000)
- :param max_classes_for_discrete: (Default value = 10)
-
+ Plot how the effect of predictors on outcomes changes with context (heterogeneous).
+
+ Args:
+ model (SKLearnWrapper): a fitted ``contextualized.easy`` model
+ C: the context values to use to estimate the effects
+ X: the predictor values to use to estimate the effects
+ max_classes_for_discrete (int, optional): maximum number of classes to treat as discrete. Default 10.
+ min_effect_size (float, optional): minimum effect size to plot. Default 0.003.
+ y_prefix (str, optional): y prefix for plot. Default "Influence of ".
+ X_names (List[str], optional): names of predictors. Default None.
+        verbose (bool, optional): print progress. Default True.
+        individual_preds (bool, optional): whether to plot each bootstrap. Default True.
+ C_vis (np.ndarray, optional): Context bins used to visualize context (n_vis, n_contexts). Default None to construct anew.
+ n_vis (int, optional): Number of bins to use to visualize context. Default 1000.
+ lower_pct (int, optional): Lower percentile for bootstraps. Default 2.5.
+ upper_pct (int, optional): Upper percentile for bootstraps. Default 97.5.
+ classification (bool, optional): Whether to exponentiate the effects. Default True.
+ C_encoders (List[sklearn.preprocessing.LabelEncoder], optional): encoders for each context. Default None.
+ C_means (np.ndarray, optional): means for each context. Default None.
+ C_stds (np.ndarray, optional): standard deviations for each context. Default None.
+ xlabel_prefix (str, optional): prefix for x label. Default "".
+ figname (str, optional): name of figure to save. Default None.
+
+ Returns:
+ None
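+
+    Example (illustrative sketch; ``model`` is assumed to be a fitted ``contextualized.easy`` model)::
+
+        plot_heterogeneous_predictor_effects(model, C, X, classification=False, min_effect_size=0.01)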
"""
c_vis = maybe_make_c_vis(C, **kwargs)
n_vis = c_vis.shape[0]
diff --git a/contextualized/analysis/embeddings.py b/contextualized/analysis/embeddings.py
index d21257c..9cb649b 100644
--- a/contextualized/analysis/embeddings.py
+++ b/contextualized/analysis/embeddings.py
@@ -1,9 +1,10 @@
"""
Utilities for plotting embeddings of fitted Contextualized models.
"""
-
+from typing import *
import numpy as np
+import pandas as pd
import matplotlib.pyplot as plt
import matplotlib as mpl
@@ -11,16 +12,26 @@
def plot_embedding_for_all_covars(
- reps, covars_df, covars_stds=None, covars_means=None, covars_encoders=None, **kwargs
-):
+ reps: np.ndarray,
+ covars_df: pd.DataFrame,
+ covars_stds: np.ndarray = None,
+ covars_means: np.ndarray = None,
+ covars_encoders: List[Callable] = None,
+ **kwargs,
+) -> None:
"""
Plot embeddings of representations for all covariates in a Pandas dataframe.
- :param reps:
- :param covars_df:
- :param covars_stds: Used to project back to readable values. (Default value = None)
- :param covars_means: Used to project back to readable values. (Default value = None)
- :param covars_encoders: Used to project back to readable values. (Default value = None)
- :param kwargs: Keyword arguments for plotting.
+
+ Args:
+ reps (np.ndarray): Embeddings of shape (n_samples, n_dims).
+ covars_df (pd.DataFrame): DataFrame of covariates.
+ covars_stds (np.ndarray, optional): Standard deviations of covariates. Defaults to None.
+ covars_means (np.ndarray, optional): Means of covariates. Defaults to None.
+ covars_encoders (List[LabelEncoder], optional): Encoders for covariates. Defaults to None.
+ kwargs: Keyword arguments for plotting.
+
+ Returns:
+ None
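+
+    Example (illustrative sketch; ``Z`` is assumed to be a precomputed 2-D embedding aligned row-wise with ``covars_df``)::
+
+        plot_embedding_for_all_covars(Z, covars_df, covars_encoders=encoders)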
"""
for i, covar in enumerate(covars_df.columns):
my_labels = covars_df.iloc[:, i].values
@@ -49,17 +60,20 @@ def plot_embedding_for_all_covars(
def plot_lowdim_rep(
- low_dim,
- labels,
+ low_dim: np.ndarray,
+ labels: np.ndarray,
**kwargs,
):
"""
+ Plot a low-dimensional representation of a dataset.
- :param low_dim:
- :param labels:
- :param kwargs:
- Keyword arguments.
+
+    Args:
+        low_dim (np.ndarray): Low-dimensional representation of shape (n_samples, 2).
+        labels (np.ndarray): Labels of shape (n_samples,).
+        kwargs: Keyword arguments for plotting.
+
+    Returns:
+        None
"""
if len(set(labels)) < kwargs.get("max_classes_for_discrete", 10): # discrete labels
diff --git a/contextualized/analysis/pvals.py b/contextualized/analysis/pvals.py
index e98efd0..c745199 100644
--- a/contextualized/analysis/pvals.py
+++ b/contextualized/analysis/pvals.py
@@ -2,6 +2,7 @@
Analysis tools for generating pvalues from bootstrap replicates.
"""
+from typing import *
import numpy as np
@@ -10,6 +11,7 @@
get_homogeneous_predictor_effects,
get_heterogeneous_predictor_effects,
)
+from contextualized.easy.wrappers import SKLearnWrapper
def calc_pval_bootstraps_one_sided(estimates, thresh=0, laplace_smoothing=1):
@@ -48,19 +50,19 @@ def calc_pval_bootstraps_one_sided_mean(estimates, laplace_smoothing=1):
)
-def calc_homogeneous_context_effects_pvals(model, C, **kwargs):
+def calc_homogeneous_context_effects_pvals(
+ model: SKLearnWrapper, C: np.ndarray, **kwargs
+) -> np.ndarray:
"""
Calculate p-values for the effects of context.
- Parameters
- ----------
- model : contextualized.models.Model
- C : np.ndarray
+ Args:
+ model (SKLearnWrapper): Model to analyze.
+ C (np.ndarray): Contexts to analyze.
- Returns
- -------
- pvals : np.ndarray of shape (n_contexts, n_outcomes) testing whether the
- sign is consistent across bootstraps
+ Returns:
+ np.ndarray: P-values of shape (n_contexts, n_outcomes) testing whether the
+ sign of the direct effect of context on outcomes is consistent across bootstraps.
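+
+    Example (illustrative sketch; ``model`` is assumed to be fitted with ``n_bootstraps`` > 1)::
+
+        pvals = calc_homogeneous_context_effects_pvals(model, C)  # shape (n_contexts, n_outcomes)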
"""
_, effects = get_homogeneous_context_effects(model, C, **kwargs)
# effects.shape: (n_contexts, n_bootstraps, n_context_vals, n_outcomes)
@@ -86,19 +88,19 @@ def calc_homogeneous_context_effects_pvals(model, C, **kwargs):
return pvals
-def calc_homogeneous_predictor_effects_pvals(model, C, **kwargs):
+def calc_homogeneous_predictor_effects_pvals(
+ model: SKLearnWrapper, C: np.ndarray, **kwargs
+) -> np.ndarray:
"""
- Calculate p-values for the effects of predictors.
+ Calculate p-values for the context-invariant effects of predictors.
- Parameters
- ----------
- model : contextualized.models.Model
- C : np.ndarray
+ Args:
+ model (SKLearnWrapper): Model to analyze.
+ C (np.ndarray): Contexts to analyze.
- Returns
- -------
- pvals : np.ndarray of shape (n_predictors, n_outcomes) testing whether the
- sign is consistent across bootstraps
+ Returns:
+ np.ndarray: P-values of shape (n_predictors, n_outcomes) testing whether the
+ sign of the context-invariant predictor effects are consistent across bootstraps.
"""
_, effects = get_homogeneous_predictor_effects(model, C, **kwargs)
# effects.shape: (n_predictors, n_bootstraps, n_outcomes)
@@ -126,15 +128,13 @@ def calc_heterogeneous_predictor_effects_pvals(model, C, **kwargs):
"""
Calculate p-values for the heterogeneous effects of predictors.
- Parameters
- ----------
- model : contextualized.models.Model
- C : np.ndarray
+ Args:
+ model (SKLearnWrapper): Model to analyze.
+ C (np.ndarray): Contexts to analyze.
- Returns
- -------
- pvals : np.ndarray of shape (n_contexts, n_predictors, n_outcomes) testing
- whether the sign of the change wrt context is consistent across bootstraps
+ Returns:
+ np.ndarray: P-values of shape (n_contexts, n_predictors, n_outcomes) testing whether the
+            sign of the context-varying parameter range is consistent across bootstraps.
"""
_, effects = get_heterogeneous_predictor_effects(model, C, **kwargs)
# effects.shape is (n_contexts, n_predictors, n_bootstraps, n_context_vals, n_outcomes)
diff --git a/contextualized/analysis/utils.py b/contextualized/analysis/utils.py
index e420e4b..439f590 100644
--- a/contextualized/analysis/utils.py
+++ b/contextualized/analysis/utils.py
@@ -2,16 +2,20 @@
Miscellaneous utility functions.
"""
+from typing import *
+
import numpy as np
-def convert_to_one_hot(col):
+def convert_to_one_hot(col: Collection[Any]) -> Tuple[np.ndarray, List[Any]]:
"""
+ Converts a categorical variable to a one-hot vector.
- :param col: np array with observations
-
- returns col converted to one-hot values, and list of one-hot values.
+ Args:
+ col (Collection[Any]): The categorical variable.
+ Returns:
+ Tuple[np.ndarray, List[Any]]: The one-hot vector and the possible values.
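+
+    Example (illustrative)::
+
+        encoded, values = convert_to_one_hot(["a", "b", "a"])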
"""
vals = list(set(col))
one_hot_vars = np.array([vals.index(x) for x in col], dtype=np.float32)
diff --git a/contextualized/dags/lightning_modules.py b/contextualized/dags/lightning_modules.py
index 9bda98c..a6099dc 100644
--- a/contextualized/dags/lightning_modules.py
+++ b/contextualized/dags/lightning_modules.py
@@ -28,7 +28,12 @@
}
DEFAULT_DAG_LOSS_TYPE = "NOTEARS"
DEFAULT_DAG_LOSS_PARAMS = {
- "NOTEARS": {"alpha": 1e-1, "rho": 1e-2, "tol": 0.25, "use_dynamic_alpha_rho": False},
+ "NOTEARS": {
+ "alpha": 1e-1,
+ "rho": 1e-2,
+ "tol": 0.25,
+ "use_dynamic_alpha_rho": False,
+ },
"DAGMA": {"s": 1, "alpha": 1e0},
"poly": {},
}
@@ -143,13 +148,14 @@ def __init__(
# DAG regularizers
self.ss_dag_params = sample_specific_loss_params["dag"].get(
"params",
- DEFAULT_DAG_LOSS_PARAMS[sample_specific_loss_params["dag"]["loss_type"]].copy(),
+ DEFAULT_DAG_LOSS_PARAMS[
+ sample_specific_loss_params["dag"]["loss_type"]
+ ].copy(),
)
-
self.arch_dag_params = archetype_loss_params["dag"].get(
- "params",
- DEFAULT_DAG_LOSS_PARAMS[archetype_loss_params["dag"]["loss_type"]].copy()
+ "params",
+ DEFAULT_DAG_LOSS_PARAMS[archetype_loss_params["dag"]["loss_type"]].copy(),
)
self.val_dag_loss_params = {"alpha": 1e0, "rho": 1e0}
@@ -415,7 +421,8 @@ def _maybe_update_alpha_rho(self, epoch_dag_loss, dag_params):
"""
if (
dag_params.get("use_dynamic_alpha_rho", False)
- and epoch_dag_loss > dag_params.get("tol", .25) * dag_params.get("h_old", 0)
+ and epoch_dag_loss
+ > dag_params.get("tol", 0.25) * dag_params.get("h_old", 0)
and dag_params["alpha"] < 1e12
and dag_params["rho"] < 1e12
):
diff --git a/contextualized/easy/ContextualGAM.py b/contextualized/easy/ContextualGAM.py
index 9b50bca..5ea6cda 100644
--- a/contextualized/easy/ContextualGAM.py
+++ b/contextualized/easy/ContextualGAM.py
@@ -9,7 +9,18 @@
class ContextualGAMClassifier(ContextualizedClassifier):
"""
- A GAM as context encoder with a classifier on top.
+ The Contextual GAM Classifier separates and interprets the effect of context in context-varying decisions and classifiers, such as heterogeneous disease diagnoses.
+ Implemented as a Contextual Generalized Additive Model with a classifier on top.
+ Always uses a Neural Additive Model ("ngam") encoder for interpretability.
+ See `this paper `__
+ for more details.
+
+ Args:
+ n_bootstraps (int, optional): Number of bootstraps to use. Defaults to 1.
+        num_archetypes (int, optional): Number of archetypes to use. Defaults to 0, which uses the NaiveMetaModel. If > 0, uses archetypes in the ContextualizedMetaModel.
+ alpha (float, optional): Regularization strength. Defaults to 0.0.
+ mu_ratio (float, optional): Float in range (0.0, 1.0), governs how much the regularization applies to context-specific parameters or context-specific offsets. Defaults to 0.0.
+ l1_ratio (float, optional): Float in range (0.0, 1.0), governs how much the regularization penalizes l1 vs l2 parameter norms. Defaults to 0.0.
"""
def __init__(self, **kwargs):
@@ -19,7 +30,18 @@ def __init__(self, **kwargs):
class ContextualGAMRegressor(ContextualizedRegressor):
"""
- A GAM as context encoder with a regressor on top.
+ The Contextual GAM Regressor separates and interprets the effect of context in context-varying relationships, such as heterogeneous treatment effects.
+ Implemented as a Contextual Generalized Additive Model with a linear regressor on top.
+ Always uses a Neural Additive Model ("ngam") encoder for interpretability.
+ See `this paper `__
+ for more details.
+
+ Args:
+ n_bootstraps (int, optional): Number of bootstraps to use. Defaults to 1.
+        num_archetypes (int, optional): Number of archetypes to use. Defaults to 0, which uses the NaiveMetaModel. If > 0, uses archetypes in the ContextualizedMetaModel.
+ alpha (float, optional): Regularization strength. Defaults to 0.0.
+ mu_ratio (float, optional): Float in range (0.0, 1.0), governs how much the regularization applies to context-specific parameters or context-specific offsets. Defaults to 0.0.
+ l1_ratio (float, optional): Float in range (0.0, 1.0), governs how much the regularization penalizes l1 vs l2 parameter norms. Defaults to 0.0.
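+
+    Example (illustrative sketch; assumes numpy arrays ``C``, ``X``, ``Y``)::
+
+        model = ContextualGAMRegressor(n_bootstraps=3)
+        model.fit(C, X, Y)
+        Y_pred = model.predict(C, X)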
"""
def __init__(self, **kwargs):
diff --git a/contextualized/easy/ContextualizedClassifier.py b/contextualized/easy/ContextualizedClassifier.py
index a99d060..f5cab64 100644
--- a/contextualized/easy/ContextualizedClassifier.py
+++ b/contextualized/easy/ContextualizedClassifier.py
@@ -11,7 +11,16 @@
class ContextualizedClassifier(ContextualizedRegressor):
"""
- sklearn-like interface to Contextualized Classifiers.
+ Contextualized Logistic Regression reveals context-dependent decisions and decision boundaries.
+ Implemented as a ContextualizedRegressor with logistic link function and binary cross-entropy loss.
+
+ Args:
+ n_bootstraps (int, optional): Number of bootstraps to use. Defaults to 1.
+        num_archetypes (int, optional): Number of archetypes to use. Defaults to 0, which uses the NaiveMetaModel. If > 0, uses archetypes in the ContextualizedMetaModel.
+ encoder_type (str, optional): Type of encoder to use ("mlp", "ngam", "linear"). Defaults to "mlp".
+ alpha (float, optional): Regularization strength. Defaults to 0.0.
+ mu_ratio (float, optional): Float in range (0.0, 1.0), governs how much the regularization applies to context-specific parameters or context-specific offsets. Defaults to 0.0.
+ l1_ratio (float, optional): Float in range (0.0, 1.0), governs how much the regularization penalizes l1 vs l2 parameter norms. Defaults to 0.0.
"""
def __init__(self, **kwargs):
@@ -20,14 +29,15 @@ def __init__(self, **kwargs):
super().__init__(**kwargs)
def predict(self, C, X, individual_preds=False, **kwargs):
- """
- Predict outcomes from context C and predictors X.
+ """Predict binary outcomes from context C and predictors X.
- :param C:
- :param X:
- :param individual_preds:
- :param **kwargs:
+ Args:
+ C (np.ndarray): Context array of shape (n_samples, n_context_features)
+            X (np.ndarray): Predictor array of shape (n_samples, n_features)
+ individual_preds (bool, optional): Whether to return individual predictions for each model. Defaults to False.
+ Returns:
+ Union[np.ndarray, List[np.ndarray]]: The binary outcomes predicted by the context-specific models (n_samples, y_dim). Returned as lists of individual bootstraps if individual_preds is True.
"""
return np.round(super().predict(C, X, individual_preds, **kwargs))
@@ -35,10 +45,13 @@ def predict_proba(self, C, X, **kwargs):
"""
Predict probabilities of outcomes from context C and predictors X.
- :param C:
- :param X:
- :param **kwargs:
+ Args:
+ C (np.ndarray): Context array of shape (n_samples, n_context_features)
+            X (np.ndarray): Predictor array of shape (n_samples, n_features)
+ individual_preds (bool, optional): Whether to return individual predictions for each model. Defaults to False.
+ Returns:
+            Union[np.ndarray, List[np.ndarray]]: The outcome probabilities predicted by the context-specific models (n_samples, y_dim, 2). Returned as lists of individual bootstraps if individual_preds is True.
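+
+        Example (illustrative sketch; assumes a fitted classifier)::
+
+            probs = model.predict_proba(C, X)  # shape (n_samples, y_dim, 2)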
"""
# Returns a np array of shape N samples, K outcomes, 2.
probs = super().predict(C, X, **kwargs)
diff --git a/contextualized/easy/ContextualizedNetworks.py b/contextualized/easy/ContextualizedNetworks.py
index 0cae6bc..ae3b5a7 100644
--- a/contextualized/easy/ContextualizedNetworks.py
+++ b/contextualized/easy/ContextualizedNetworks.py
@@ -1,16 +1,21 @@
"""
sklearn-like interface to Contextualized Networks.
"""
+from typing import *
+
import numpy as np
from contextualized.easy.wrappers import SKLearnWrapper
from contextualized.regression.trainers import CorrelationTrainer, MarkovTrainer
from contextualized.regression.lightning_modules import (
ContextualizedCorrelation,
- # TasksplitContextualizedCorrelation, # TODO: Incorporate Tasksplit
ContextualizedMarkovGraph,
)
-from contextualized.dags.lightning_modules import NOTMAD, DEFAULT_DAG_LOSS_TYPE, DEFAULT_DAG_LOSS_PARAMS
+from contextualized.dags.lightning_modules import (
+ NOTMAD,
+ DEFAULT_DAG_LOSS_TYPE,
+ DEFAULT_DAG_LOSS_PARAMS,
+)
from contextualized.dags.trainers import GraphTrainer
from contextualized.dags.graph_utils import dag_pred_np
@@ -20,28 +25,77 @@ class ContextualizedNetworks(SKLearnWrapper):
sklearn-like interface to Contextualized Networks.
"""
- def _split_train_data(self, C, X, **kwargs):
- return super()._split_train_data(C, X, Y_required=False, **kwargs)
+ def _split_train_data(
+ self, C: np.ndarray, X: np.ndarray, **kwargs
+ ) -> Tuple[List[np.ndarray], List[np.ndarray]]:
+ """Splits data into train and test sets.
- def predict_networks(self, C, with_offsets=False, **kwargs):
+ Args:
+ C (np.ndarray): Contextual features for each sample.
+ X (np.ndarray): The data matrix.
+
+ Returns:
+ Tuple[List[np.ndarray], List[np.ndarray]]: The train and test sets for C and X as ([C_train, X_train], [C_test, X_test]).
"""
- Predicts context-specific networks.
+ return super()._split_train_data(C, X, Y_required=False, **kwargs)
+
+ def predict_networks(
+ self,
+ C: np.ndarray,
+        with_offsets: bool = False,
+ individual_preds: bool = False,
+ **kwargs,
+ ) -> Union[
+ np.ndarray,
+ List[np.ndarray],
+ Tuple[np.ndarray, np.ndarray],
+ Tuple[List[np.ndarray], List[np.ndarray]],
+ ]:
+ """Predicts context-specific networks given contextual features.
+
+ Args:
+ C (np.ndarray): Contextual features for each sample (n_samples, n_context_features)
+ with_offsets (bool, optional): If True, returns both the network parameters and offsets. Defaults to False.
+ individual_preds (bool, optional): If True, returns the predictions for each bootstrap. Defaults to False.
+
+ Returns:
+ Union[np.ndarray, List[np.ndarray], Tuple[np.ndarray, np.ndarray], Tuple[List[np.ndarray], List[np.ndarray]]]: The predicted network parameters (and offsets if with_offsets is True). Returned as lists of individual bootstraps if individual_preds is True.
"""
betas, mus = self.predict_params(C, uses_y=False, **kwargs)
if with_offsets:
return betas, mus
return betas
- def predict_X(self, C, X, **kwargs):
- """
- Predicts X based on context-specific networks.
+ def predict_X(
+ self, C: np.ndarray, X: np.ndarray, individual_preds: bool = False, **kwargs
+ ) -> Union[np.ndarray, List[np.ndarray]]:
+ """Reconstructs the data matrix based on predicted contextualized networks and the true data matrix.
+ Useful for measuring reconstruction error or for imputation.
+
+ Args:
+ C (np.ndarray): Contextual features for each sample (n_samples, n_context_features)
+ X (np.ndarray): The data matrix (n_samples, n_features)
+ individual_preds (bool, optional): If True, returns the predictions for each bootstrap. Defaults to False.
+ **kwargs: Keyword arguments for the Lightning trainer's predict_y method.
+
+ Returns:
+ Union[np.ndarray, List[np.ndarray]]: The predicted data matrix, or matrices for each bootstrap if individual_preds is True (n_samples, n_features).
"""
- return self.predict(C, X, **kwargs)
+ return self.predict(C, X, individual_preds=individual_preds, **kwargs)
class ContextualizedCorrelationNetworks(ContextualizedNetworks):
"""
- Easy interface to Contextualized Correlation Networks.
+    Contextualized Correlation Networks reveal context-varying feature correlations, interaction strengths, and dependencies in feature groups.
+    Uses the Contextualized Networks model; see the `paper `__ for detailed estimation procedures.
+
+ Args:
+ n_bootstraps (int, optional): Number of bootstraps to use. Defaults to 1.
+ num_archetypes (int, optional): Number of archetypes to use. Defaults to 10. Always uses archetypes in the ContextualizedMetaModel.
+ encoder_type (str, optional): Type of encoder to use ("mlp", "ngam", "linear"). Defaults to "mlp".
+ alpha (float, optional): Regularization strength. Defaults to 0.0.
+ mu_ratio (float, optional): Float in range (0.0, 1.0), governs how much the regularization applies to context-specific parameters or context-specific offsets. Defaults to 0.0.
+ l1_ratio (float, optional): Float in range (0.0, 1.0), governs how much the regularization penalizes l1 vs l2 parameter norms. Defaults to 0.0.
"""
def __init__(self, **kwargs):
@@ -49,18 +103,25 @@ def __init__(self, **kwargs):
ContextualizedCorrelation, [], [], CorrelationTrainer, **kwargs
)
- def predict_correlation(self, C, individual_preds=True, squared=True, **kwargs):
- """
- Predict correlation matrices.
+ def predict_correlation(
+ self, C: np.ndarray, individual_preds: bool = True, squared: bool = True
+ ) -> Union[np.ndarray, List[np.ndarray]]:
+ """Predicts context-specific correlations between features.
+
+ Args:
+ C (Numpy ndarray): Contextual features for each sample (n_samples, n_context_features)
+ individual_preds (bool, optional): If True, returns the predictions for each bootstrap. Defaults to True.
+ squared (bool, optional): If True, returns the squared correlations. Defaults to True.
+
+ Returns:
+ Union[np.ndarray, List[np.ndarray]]: The predicted context-specific correlation matrices, or matrices for each bootstrap if individual_preds is True (n_samples, n_features, n_features).
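+
+        Example (illustrative sketch; assumes a fitted model)::
+
+            rho_sq = model.predict_correlation(C, individual_preds=False, squared=True)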
"""
get_dataloader = lambda i: self.models[i].dataloader(
C, np.zeros((len(C), self.x_dim))
)
rhos = np.array(
[
- self.trainers[i].predict_params(
- self.models[i], get_dataloader(i), **kwargs
- )[0]
+ self.trainers[i].predict_params(self.models[i], get_dataloader(i))[0]
for i in range(len(self.models))
]
)
@@ -73,9 +134,18 @@ def predict_correlation(self, C, individual_preds=True, squared=True, **kwargs):
return np.square(np.mean(rhos, axis=0))
return np.mean(rhos)
- def measure_mses(self, C, X, individual_preds=False):
- """
- Measure mean-squared errors.
+ def measure_mses(
+ self, C: np.ndarray, X: np.ndarray, individual_preds: bool = False
+ ) -> Union[np.ndarray, List[np.ndarray]]:
+ """Measures mean-squared errors.
+
+ Args:
+ C (np.ndarray): Contextual features for each sample (n_samples, n_context_features)
+ X (np.ndarray): The data matrix (n_samples, n_features)
+ individual_preds (bool, optional): If True, returns the predictions for each bootstrap. Defaults to False.
+
+ Returns:
+ Union[np.ndarray, List[np.ndarray]]: The mean-squared errors for each sample, or for each bootstrap if individual_preds is True (n_samples).
"""
betas, mus = self.predict_networks(C, individual_preds=True, with_offsets=True)
mses = np.zeros((len(betas), len(C))) # n_bootstraps x n_samples
@@ -92,15 +162,35 @@ def measure_mses(self, C, X, individual_preds=False):
class ContextualizedMarkovNetworks(ContextualizedNetworks):
"""
- Easy interface to Contextualized Markov Networks.
+ Contextualized Markov Networks reveal context-varying feature dependencies, cliques, and modules.
+ Implemented as Contextualized Gaussian Precision Matrices, directly interpretable as Markov Networks.
+    Uses the Contextualized Networks model; see the `paper `__ for detailed estimation procedures.
+
+ Args:
+ n_bootstraps (int, optional): Number of bootstraps to use. Defaults to 1.
+ num_archetypes (int, optional): Number of archetypes to use. Defaults to 10. Always uses archetypes in the ContextualizedMetaModel.
+ encoder_type (str, optional): Type of encoder to use ("mlp", "ngam", "linear"). Defaults to "mlp".
+ alpha (float, optional): Regularization strength. Defaults to 0.0.
+ mu_ratio (float, optional): Float in range (0.0, 1.0), governs how much the regularization applies to context-specific parameters or context-specific offsets. Defaults to 0.0.
+ l1_ratio (float, optional): Float in range (0.0, 1.0), governs how much the regularization penalizes l1 vs l2 parameter norms. Defaults to 0.0.
"""
def __init__(self, **kwargs):
super().__init__(ContextualizedMarkovGraph, [], [], MarkovTrainer, **kwargs)
- def predict_precisions(self, C, individual_preds=True):
- """
- Predict precision matrices.
+ def predict_precisions(
+ self, C: np.ndarray, individual_preds: bool = True
+ ) -> Union[np.ndarray, List[np.ndarray]]:
+ """Predicts context-specific precision matrices.
+        Can be converted to context-specific Markov networks by binarizing the precision matrices, i.e. setting all non-zero entries to 1.
+ Can be converted to context-specific covariance matrices by taking the inverse.
+
+ Args:
+ C (np.ndarray): Contextual features for each sample (n_samples, n_context_features)
+ individual_preds (bool, optional): If True, returns the predictions for each bootstrap. Defaults to True.
+
+ Returns:
+ Union[np.ndarray, List[np.ndarray]]: The predicted context-specific Markov networks as precision matrices, or matrices for each bootstrap if individual_preds is True (n_samples, n_features, n_features).
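+
+        Example (illustrative sketch; assumes a fitted model)::
+
+            omegas = model.predict_precisions(C, individual_preds=False)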
"""
get_dataloader = lambda i: self.models[i].dataloader(
C, np.zeros((len(C), self.x_dim))
@@ -115,9 +205,18 @@ def predict_precisions(self, C, individual_preds=True):
return precisions
return np.mean(precisions, axis=0)
- def measure_mses(self, C, X, individual_preds=False):
- """
- Measure mean-squared errors.
+ def measure_mses(
+ self, C: np.ndarray, X: np.ndarray, individual_preds: bool = False
+ ) -> Union[np.ndarray, List[np.ndarray]]:
+ """Measures mean-squared errors.
+
+ Args:
+ C (np.ndarray): Contextual features for each sample (n_samples, n_context_features)
+ X (np.ndarray): The data matrix (n_samples, n_features)
+ individual_preds (bool, optional): If True, returns the predictions for each bootstrap. Defaults to False.
+
+ Returns:
+ Union[np.ndarray, List[np.ndarray]]: The mean-squared errors for each sample, or for each bootstrap if individual_preds is True (n_samples).
"""
betas, mus = self.predict_networks(C, individual_preds=True, with_offsets=True)
mses = np.zeros((len(betas), len(C))) # n_bootstraps x n_samples
@@ -140,18 +239,40 @@ def measure_mses(self, C, X, individual_preds=False):
class ContextualizedBayesianNetworks(ContextualizedNetworks):
"""
- Easy interface to Contextualized Bayesian Networks.
- Uses NOTMAD model.
- See this paper:
- https://arxiv.org/abs/2111.01104
- for more details.
+ Contextualized Bayesian Networks and Directed Acyclic Graphs (DAGs) reveal context-dependent causal relationships, effect sizes, and variable ordering.
+    Uses the NOTMAD model; see the `paper <https://arxiv.org/abs/2111.01104>`__ for detailed estimation procedures.
+
+ Args:
+ n_bootstraps (int, optional): Number of bootstraps to use. Defaults to 1.
+ num_archetypes (int, optional): Number of archetypes to use. Defaults to 16. Always uses archetypes in the ContextualizedMetaModel.
+ encoder_type (str, optional): Type of encoder to use ("mlp", "ngam", "linear"). Defaults to "mlp".
+ archetype_dag_loss_type (str, optional): The type of loss to use for the archetype loss. Defaults to "l1".
+ archetype_l1 (float, optional): The strength of the l1 regularization for the archetype loss. Defaults to 0.0.
+ archetype_dag_params (dict, optional): Parameters for the archetype loss. Defaults to {"loss_type": "l1", "params": {"alpha": 0.0, "rho": 0.0, "s": 0.0, "tol": 1e-4}}.
+ archetype_dag_loss_params (dict, optional): Parameters for the archetype loss. Defaults to {"alpha": 0.0, "rho": 0.0, "s": 0.0, "tol": 1e-4}.
+ archetype_alpha (float, optional): The strength of the alpha regularization for the archetype loss. Defaults to 0.0.
+ archetype_rho (float, optional): The strength of the rho regularization for the archetype loss. Defaults to 0.0.
+ archetype_s (float, optional): The strength of the s regularization for the archetype loss. Defaults to 0.0.
+ archetype_tol (float, optional): The tolerance for the archetype loss. Defaults to 1e-4.
+ archetype_use_dynamic_alpha_rho (bool, optional): Whether to use dynamic alpha and rho for the archetype loss. Defaults to False.
+ init_mat (np.ndarray, optional): The initial adjacency matrix for the archetype loss. Defaults to None.
+ num_factors (int, optional): The number of factors for the archetype loss. Defaults to 0.
+ factor_mat_l1 (float, optional): The strength of the l1 regularization for the factor matrix for the archetype loss. Defaults to 0.
+ sample_specific_dag_loss_type (str, optional): The type of loss to use for the sample-specific loss. Defaults to "l1".
+ sample_specific_alpha (float, optional): The strength of the alpha regularization for the sample-specific loss. Defaults to 0.0.
+ sample_specific_rho (float, optional): The strength of the rho regularization for the sample-specific loss. Defaults to 0.0.
+ sample_specific_s (float, optional): The strength of the s regularization for the sample-specific loss. Defaults to 0.0.
+ sample_specific_tol (float, optional): The tolerance for the sample-specific loss. Defaults to 1e-4.
+ sample_specific_use_dynamic_alpha_rho (bool, optional): Whether to use dynamic alpha and rho for the sample-specific loss. Defaults to False.
"""
def _parse_private_init_kwargs(self, **kwargs):
"""
- Parses private init kwargs.
- """
+ Parses the kwargs for the NOTMAD model.
+ Args:
+ **kwargs: Keyword arguments for the NOTMAD model, including the encoder, archetype loss, sample-specific loss, and optimization parameters.
+ """
# Encoder Parameters
self._init_kwargs["model"]["encoder_kwargs"] = {
"type": kwargs.pop(
@@ -163,9 +284,11 @@ def _parse_private_init_kwargs(self, **kwargs):
"link_fn": self.constructor_kwargs["encoder_kwargs"]["link_fn"],
},
}
-
+
# Archetype-specific parameters
- archetype_dag_loss_type = kwargs.pop("archetype_dag_loss_type", DEFAULT_DAG_LOSS_TYPE)
+ archetype_dag_loss_type = kwargs.pop(
+ "archetype_dag_loss_type", DEFAULT_DAG_LOSS_TYPE
+ )
self._init_kwargs["model"]["archetype_loss_params"] = {
"l1": kwargs.get("archetype_l1", 0.0),
"dag": kwargs.get(
@@ -185,9 +308,11 @@ def _parse_private_init_kwargs(self, **kwargs):
}
if self._init_kwargs["model"]["archetype_loss_params"]["num_archetypes"] <= 0:
- print("WARNING: num_archetypes is 0. NOTMAD requires archetypes. Setting num_archetypes to 16.")
+ print(
+ "WARNING: num_archetypes is 0. NOTMAD requires archetypes. Setting num_archetypes to 16."
+ )
self._init_kwargs["model"]["archetype_loss_params"]["num_archetypes"] = 16
-
+
# Possibly update values with convenience parameters
for param, value in self._init_kwargs["model"]["archetype_loss_params"]["dag"][
"params"
@@ -213,11 +338,11 @@ def _parse_private_init_kwargs(self, **kwargs):
},
),
}
-
+
# Possibly update values with convenience parameters
- for param, value in self._init_kwargs["model"]["sample_specific_loss_params"]["dag"][
- "params"
- ].items():
+ for param, value in self._init_kwargs["model"]["sample_specific_loss_params"][
+ "dag"
+ ]["params"].items():
self._init_kwargs["model"]["sample_specific_loss_params"]["dag"]["params"][
param
] = kwargs.pop(f"sample_specific_{param}", value)
@@ -227,7 +352,7 @@ def _parse_private_init_kwargs(self, **kwargs):
"learning_rate": kwargs.pop("learning_rate", 1e-3),
"step": kwargs.pop("step", 50),
}
-
+
return [
"archetype_dag_loss_type",
"archetype_l1",
@@ -271,39 +396,56 @@ def __init__(self, **kwargs):
**kwargs,
)
- def predict_params(self, C, **kwargs):
- """
+ def predict_params(
+ self, C: np.ndarray, **kwargs
+ ) -> Union[np.ndarray, List[np.ndarray]]:
+ """Predicts context-specific Bayesian network parameters as linear coefficients in a linear structural equation model (SEM).
- :param C:
- :param individual_preds: (Default value = False)
+ Args:
+ C (np.ndarray): Contextual features for each sample (n_samples, n_context_features)
+ **kwargs: Keyword arguments for the contextualized.dags.GraphTrainer's predict_params method.
+ Returns:
+ Union[np.ndarray, List[np.ndarray]]: The linear coefficients of the predicted context-specific Bayesian network parameters (n_samples, n_features, n_features). Returned as lists of individual bootstraps if individual_preds is True.
"""
- # Returns betas
- # TODO: No mus for NOTMAD at present.
- return super().predict_params(
- C, model_includes_mus=False, **kwargs
- )
+ # No mus for NOTMAD at present.
+ return super().predict_params(C, model_includes_mus=False, **kwargs)
- def predict_networks(self, C, **kwargs):
- """
- Predicts context-specific networks.
+ def predict_networks(
+ self, C: np.ndarray, project_to_dag: bool = True, **kwargs
+ ) -> Union[np.ndarray, List[np.ndarray]]:
+ """Predicts context-specific Bayesian networks.
+
+ Args:
+ C (np.ndarray): Contextual features for each sample (n_samples, n_context_features)
+            project_to_dag (bool, optional): If True, guarantees returned graphs are DAGs by trimming edges until acyclicity is satisfied. Defaults to True.
+ **kwargs: Keyword arguments for the contextualized.dags.GraphTrainer's predict_params method.
+
+ Returns:
+ Union[np.ndarray, List[np.ndarray]]: The linear coefficients of the predicted context-specific Bayesian network parameters (n_samples, n_features, n_features). Returned as lists of individual bootstraps if individual_preds is True.
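+
+        Example (illustrative sketch; assumes a fitted model)::
+
+            dags = model.predict_networks(C, individual_preds=False)  # (n_samples, n_features, n_features)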
"""
if kwargs.pop("with_offsets", False):
print("No offsets can be returned by NOTMAD.")
betas = self.predict_params(
- C,
- uses_y=False,
- project_to_dag=kwargs.pop("project_to_dag", True),
- **kwargs
+ C, uses_y=False, project_to_dag=project_to_dag, **kwargs
)
-
return betas
- def measure_mses(self, C, X, individual_preds=False):
- """
- Measure mean-squared errors.
+ def measure_mses(
+ self, C: np.ndarray, X: np.ndarray, individual_preds: bool = False, **kwargs
+ ) -> Union[np.ndarray, List[np.ndarray]]:
+ """Measures mean-squared errors.
+
+ Args:
+ C (np.ndarray): Contextual features for each sample (n_samples, n_context_features)
+ X (np.ndarray): The data matrix (n_samples, n_features)
+ individual_preds (bool, optional): If True, returns the predictions for each bootstrap. Defaults to False.
+ **kwargs: Keyword arguments for the contextualized.dags.GraphTrainer's predict_params method.
+
+ Returns:
+ Union[np.ndarray, List[np.ndarray]]: The mean-squared errors for each sample, or for each bootstrap if individual_preds is True (n_samples).
"""
- betas = self.predict_networks(C, individual_preds=True)
+ betas = self.predict_networks(C, individual_preds=True, **kwargs)
mses = np.zeros((len(betas), len(C))) # n_bootstraps x n_samples
for bootstrap in range(len(betas)):
X_pred = dag_pred_np(X, betas[bootstrap])
diff --git a/contextualized/easy/ContextualizedRegressor.py b/contextualized/easy/ContextualizedRegressor.py
index 117097e..8f7fcae 100644
--- a/contextualized/easy/ContextualizedRegressor.py
+++ b/contextualized/easy/ContextualizedRegressor.py
@@ -14,7 +14,17 @@
class ContextualizedRegressor(SKLearnWrapper):
"""
- sklearn-like interface to Contextualized Regression.
+ Contextualized Linear Regression quantifies context-varying linear relationships.
+
+ Args:
+ n_bootstraps (int, optional): Number of bootstraps to use. Defaults to 1.
+        num_archetypes (int, optional): Number of archetypes to use. Defaults to 0, which uses the NaiveMetaModel. If > 0, uses archetypes in the ContextualizedMetaModel.
+ encoder_type (str, optional): Type of encoder to use ("mlp", "ngam", "linear"). Defaults to "mlp".
+ loss_fn (torch.nn.Module, optional): Loss function. Defaults to LOSSES["mse"].
+ link_fn (torch.nn.Module, optional): Link function. Defaults to LINK_FUNCTIONS["identity"].
+ alpha (float, optional): Regularization strength. Defaults to 0.0.
+ mu_ratio (float, optional): Float in range (0.0, 1.0), governs how much the regularization applies to context-specific parameters or context-specific offsets. Defaults to 0.0.
+ l1_ratio (float, optional): Float in range (0.0, 1.0), governs how much the regularization penalizes l1 vs l2 parameter norms. Defaults to 0.0.
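+
+    Example (illustrative sketch; assumes numpy arrays ``C``, ``X``, ``Y``)::
+
+        model = ContextualizedRegressor(num_archetypes=4)
+        model.fit(C, X, Y, max_epochs=10)
+        betas, mus = model.predict_params(C)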
"""
def __init__(self, **kwargs):
diff --git a/contextualized/easy/tests/test_bayesian_networks.py b/contextualized/easy/tests/test_bayesian_networks.py
index 60ff6eb..5ba4c46 100644
--- a/contextualized/easy/tests/test_bayesian_networks.py
+++ b/contextualized/easy/tests/test_bayesian_networks.py
@@ -28,13 +28,17 @@ def setUp(self):
def test_bayesian_factors(self):
"""Test case for ContextualizedBayesianNetworks."""
- model = ContextualizedBayesianNetworks(encoder_type="ngam", num_archetypes=16, num_factors=2)
+ model = ContextualizedBayesianNetworks(
+ encoder_type="ngam", num_archetypes=16, num_factors=2
+ )
model.fit(self.C, self.X, max_epochs=10)
networks = model.predict_networks(self.C, individual_preds=False)
assert np.shape(networks) == (self.n_samples, self.x_dim, self.x_dim)
networks = model.predict_networks(self.C, factors=True)
assert np.shape(networks) == (self.n_samples, 2, 2)
- model = ContextualizedBayesianNetworks(encoder_type="ngam", num_archetypes=16, num_factors=2)
+ model = ContextualizedBayesianNetworks(
+ encoder_type="ngam", num_archetypes=16, num_factors=2
+ )
self._quicktest(model, self.C, self.X, max_epochs=10, learning_rate=1e-3)
def test_bayesian_default(self):
@@ -43,7 +47,9 @@ def test_bayesian_default(self):
def test_bayesian_val_split(self):
model = ContextualizedBayesianNetworks()
- self._quicktest(model, self.C, self.X, max_epochs=10, learning_rate=1e-3, val_split=0.5)
+ self._quicktest(
+ model, self.C, self.X, max_epochs=10, learning_rate=1e-3, val_split=0.5
+ )
def test_bayesian_archetypes(self):
model = ContextualizedBayesianNetworks(num_archetypes=16)
@@ -61,12 +67,16 @@ def test_bayesian_encoder(self):
assert np.shape(networks) == (self.n_samples, self.x_dim, self.x_dim)
def test_bayesian_acyclicity(self):
- model = ContextualizedBayesianNetworks(archetype_dag_loss_type="DAGMA", num_archetypes=16)
+ model = ContextualizedBayesianNetworks(
+ archetype_dag_loss_type="DAGMA", num_archetypes=16
+ )
self._quicktest(model, self.C, self.X, max_epochs=10, learning_rate=1e-3)
networks = model.predict_networks(self.C, individual_preds=False)
assert np.shape(networks) == (self.n_samples, self.x_dim, self.x_dim)
- model = ContextualizedBayesianNetworks(archetype_dag_loss_type="poly", num_archetypes=16)
+ model = ContextualizedBayesianNetworks(
+ archetype_dag_loss_type="poly", num_archetypes=16
+ )
self._quicktest(model, self.C, self.X, max_epochs=10, learning_rate=1e-3)
networks = model.predict_networks(self.C, individual_preds=False)
assert np.shape(networks) == (self.n_samples, self.x_dim, self.x_dim)
diff --git a/contextualized/easy/tests/test_correlation_networks.py b/contextualized/easy/tests/test_correlation_networks.py
index 90cd890..52dbd3d 100644
--- a/contextualized/easy/tests/test_correlation_networks.py
+++ b/contextualized/easy/tests/test_correlation_networks.py
@@ -33,7 +33,9 @@ def test_correlation(self):
model = ContextualizedCorrelationNetworks()
self._quicktest(model, self.C, self.X, max_epochs=10, learning_rate=1e-3)
- self._quicktest(model, self.C, self.X, max_epochs=10, learning_rate=1e-3, val_split=0.5)
+ self._quicktest(
+ model, self.C, self.X, max_epochs=10, learning_rate=1e-3, val_split=0.5
+ )
model = ContextualizedCorrelationNetworks(num_archetypes=16)
self._quicktest(model, self.C, self.X, max_epochs=10, learning_rate=1e-3)
diff --git a/contextualized/easy/tests/test_markov_networks.py b/contextualized/easy/tests/test_markov_networks.py
index eb8f1b4..b778321 100644
--- a/contextualized/easy/tests/test_markov_networks.py
+++ b/contextualized/easy/tests/test_markov_networks.py
@@ -30,7 +30,9 @@ def test_markov(self):
"""Test Case for ContextualizedMarkovNetworks."""
model = ContextualizedMarkovNetworks()
self._quicktest(model, self.C, self.X, max_epochs=10, learning_rate=1e-3)
- self._quicktest(model, self.C, self.X, max_epochs=10, learning_rate=1e-3, val_split=0.5)
+ self._quicktest(
+ model, self.C, self.X, max_epochs=10, learning_rate=1e-3, val_split=0.5
+ )
model = ContextualizedMarkovNetworks(num_archetypes=16)
self._quicktest(model, self.C, self.X, max_epochs=10, learning_rate=1e-3)
diff --git a/contextualized/easy/tests/test_regressor.py b/contextualized/easy/tests/test_regressor.py
index e3f66e2..a0b70a3 100644
--- a/contextualized/easy/tests/test_regressor.py
+++ b/contextualized/easy/tests/test_regressor.py
@@ -90,7 +90,7 @@ def test_regressor(self):
learning_rate=1e-3,
es_patience=float("inf"),
)
-
+
# Check smaller Y.
model = ContextualizedRegressor(
num_archetypes=4, alpha=1e-1, l1_ratio=0.5, mu_ratio=0.1
diff --git a/contextualized/easy/wrappers/SKLearnWrapper.py b/contextualized/easy/wrappers/SKLearnWrapper.py
index fed4090..33de005 100644
--- a/contextualized/easy/wrappers/SKLearnWrapper.py
+++ b/contextualized/easy/wrappers/SKLearnWrapper.py
@@ -3,6 +3,8 @@
"""
import copy
import os
+from typing import *
+
import numpy as np
from pytorch_lightning.callbacks.early_stopping import EarlyStopping
from pytorch_lightning.callbacks import ModelCheckpoint
@@ -28,6 +30,19 @@
class SKLearnWrapper:
"""
An sklearn-like wrapper for Contextualized models.
+
+ Args:
+ base_constructor (class): The base class to construct the model.
+ extra_model_kwargs (dict): Extra kwargs to pass to the model constructor.
+ extra_data_kwargs (dict): Extra kwargs to pass to the dataloader constructor.
+ trainer_constructor (class): The trainer class to use.
+ n_bootstraps (int, optional): Number of bootstraps to use. Defaults to 1.
+ encoder_type (str, optional): Type of encoder to use ("mlp", "ngam", "linear"). Defaults to "mlp".
+ loss_fn (torch.nn.Module, optional): Loss function. Defaults to LOSSES["mse"].
+ link_fn (torch.nn.Module, optional): Link function. Defaults to LINK_FUNCTIONS["identity"].
+ alpha (float, optional): Regularization strength. Defaults to 0.0.
+ mu_ratio (float, optional): Float in range (0.0, 1.0), governs how much the regularization applies to context-specific parameters or context-specific offsets.
+ l1_ratio (float, optional): Float in range (0.0, 1.0), governs how much the regularization penalizes l1 vs l2 parameter norms.
"""
def _set_defaults(self):
@@ -44,12 +59,12 @@ def _set_defaults(self):
self.default_encoder_type = DEFAULT_ENCODER_TYPE
def __init__(
- self,
- base_constructor,
- extra_model_kwargs,
- extra_data_kwargs,
- trainer_constructor,
- **kwargs,
+ self,
+ base_constructor,
+ extra_model_kwargs,
+ extra_data_kwargs,
+ trainer_constructor,
+ **kwargs,
):
self._set_defaults()
self.base_constructor = base_constructor
@@ -69,7 +84,7 @@ def __init__(
"test_batch_size",
"C_val",
"X_val",
- "val_split"
+ "val_split",
],
"model": [
"loss_fn",
@@ -137,14 +152,18 @@ def __init__(
if k not in self.constructor_kwargs and k not in self.convenience_kwargs
}
# Some args will not be ignored by wrapper because sub-class will handle them.
- #self.private_kwargs = kwargs.pop("private_kwargs", [])
- #self.private_kwargs.append("private_kwargs")
+ # self.private_kwargs = kwargs.pop("private_kwargs", [])
+ # self.private_kwargs.append("private_kwargs")
# Add Predictor-Specific kwargs for parsing.
- self._init_kwargs, unrecognized_general_kwargs = self._organize_kwargs(**self.not_constructor_kwargs)
+ self._init_kwargs, unrecognized_general_kwargs = self._organize_kwargs(
+ **self.not_constructor_kwargs
+ )
for key, value in self.constructor_kwargs.items():
self._init_kwargs["model"][key] = value
recognized_private_init_kwargs = self._parse_private_init_kwargs(**kwargs)
- for kwarg in set(unrecognized_general_kwargs) - set(recognized_private_init_kwargs):
+ for kwarg in set(unrecognized_general_kwargs) - set(
+ recognized_private_init_kwargs
+ ):
print(f"Received unknown keyword argument {kwarg}, probably ignoring.")
def _organize_and_expand_fit_kwargs(self, **kwargs):
@@ -175,8 +194,8 @@ def maybe_add_kwarg(category, kwarg, default_val):
maybe_add_kwarg("model", "x_dim", self.x_dim)
maybe_add_kwarg("model", "y_dim", self.y_dim)
if (
- "num_archetypes" in organized_kwargs["model"]
- and organized_kwargs["model"]["num_archetypes"] == 0
+ "num_archetypes" in organized_kwargs["model"]
+ and organized_kwargs["model"]["num_archetypes"] == 0
):
del organized_kwargs["model"]["num_archetypes"]
@@ -212,7 +231,6 @@ def maybe_add_kwarg(category, kwarg, default_val):
maybe_add_kwarg("trainer", "accelerator", self.accelerator)
return organized_kwargs
-
def _parse_private_fit_kwargs(self, **kwargs):
"""
Parse private (model-specific) kwargs passed to fit function.
@@ -234,8 +252,9 @@ def _update_acceptable_kwargs(self, category, new_kwargs, acceptable=True):
If acceptable=False, the new kwargs will be removed from the list of acceptable kwargs.
"""
if acceptable:
- self.acceptable_kwargs[category] = list(set(
- self.acceptable_kwargs[category]).union(set(new_kwargs)))
+ self.acceptable_kwargs[category] = list(
+ set(self.acceptable_kwargs[category]).union(set(new_kwargs))
+ )
else:
self.acceptable_kwargs[category] = list(
set(self.acceptable_kwargs[category]) - set(new_kwargs)
@@ -252,7 +271,7 @@ def _organize_kwargs(self, **kwargs):
organized_kwargs = {category: {} for category in self.acceptable_kwargs}
unrecognized_kwargs = []
for kwarg, value in kwargs.items():
- #if kwarg in self.private_kwargs:
+ # if kwarg in self.private_kwargs:
# continue
not_found = True
for category, category_kwargs in self.acceptable_kwargs.items():
@@ -367,11 +386,18 @@ def _build_dataloaders(self, model, train_data, val_data, **kwargs):
return train_dataloader, val_dataloader
- def predict(self, C, X, individual_preds=False, **kwargs):
- """
- :param C:
- :param X:
- :param individual_preds: (Default value = False)
+ def predict(
+ self, C: np.ndarray, X: np.ndarray, individual_preds: bool = False, **kwargs
+ ) -> Union[np.ndarray, List[np.ndarray]]:
+ """Predict outcomes from context C and predictors X.
+
+ Args:
+ C (np.ndarray): Context array of shape (n_samples, n_context_features)
+ X (np.ndarray): Predictor array of shape (n_samples, n_features)
+ individual_preds (bool, optional): Whether to return individual predictions for each model. Defaults to False.
+
+ Returns:
+ Union[np.ndarray, List[np.ndarray]]: The outcomes predicted by the context-specific models, shape (n_samples, y_dim). Returned as a list of per-bootstrap predictions if individual_preds is True.
"""
if not hasattr(self, "models") or self.models is None:
raise ValueError(
@@ -392,11 +418,33 @@ def predict(self, C, X, individual_preds=False, **kwargs):
return np.mean(predictions, axis=0)
def predict_params(
- self, C, individual_preds=False, model_includes_mus=True, **kwargs
- ):
+ self,
+ C: np.ndarray,
+ individual_preds: bool = False,
+ model_includes_mus: bool = True,
+ **kwargs,
+ ) -> Union[
+ np.ndarray,
+ List[np.ndarray],
+ Tuple[np.ndarray, np.ndarray],
+ Tuple[List[np.ndarray], List[np.ndarray]],
+ ]:
"""
- :param C:
- :param individual_preds: (Default value = False)
+ Predict context-specific model parameters from context C.
+
+ Args:
+ C (np.ndarray): Context array of shape (n_samples, n_context_features)
+ individual_preds (bool, optional): Whether to return individual model predictions for each bootstrap. Defaults to False, averaging across bootstraps.
+ model_includes_mus (bool, optional): Whether the model includes context-specific offsets (mu). Defaults to True.
+
+ Returns:
+ Union[np.ndarray, List[np.ndarray], Tuple[np.ndarray, np.ndarray], Tuple[List[np.ndarray], List[np.ndarray]]]: The parameters of the predicted context-specific models.
+ Returned as lists over individual bootstraps if individual_preds is True, otherwise averaged across bootstraps for a better estimate.
+ If model_includes_mus is True, returns both coefficients and offsets as a tuple of (betas, mus). Otherwise, returns coefficients (betas) only.
+ That is, ([betas], [mus]) if individual_preds is True, otherwise (betas, mus).
+ For model_includes_mus=False, [betas] if individual_preds is True, otherwise betas.
+ betas is shape (n_samples, x_dim, y_dim) or (n_samples, x_dim) if y_dim = 1.
+ mus is shape (n_samples, y_dim) or (n_samples,) if y_dim = 1.
"""
# Returns betas, mus
if kwargs.pop("uses_y", True):
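To make the return-structure combinations above concrete, here is a short sketch of the two predict_params modes, assuming the same illustrative `ContextualizedRegressor` subclass as earlier (not part of this diff):

```python
import numpy as np
from contextualized.easy import ContextualizedRegressor  # assumed concrete subclass

C = np.random.normal(size=(50, 4))
X = np.random.normal(size=(50, 3))
Y = np.random.normal(size=(50, 1))

model = ContextualizedRegressor(n_bootstraps=3)
model.fit(C, X, Y, max_epochs=2)

# Averaged over bootstraps: betas (n_samples, x_dim), mus (n_samples,) since y_dim == 1.
betas, mus = model.predict_params(C)

# Per-bootstrap: a tuple of lists, one entry per bootstrap.
boot_betas, boot_mus = model.predict_params(C, individual_preds=True)
assert len(boot_betas) == 3 and len(boot_mus) == 3
```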
@@ -423,13 +471,25 @@ def predict_params(
return np.mean(betas, axis=0)
return betas
- def fit(self, *args, **kwargs):
+ def fit(self, *args, **kwargs) -> None:
"""
- Fit model to data.
- Requires numpy arrays C, X, with optional Y.
- If target Y is not given, then X is assumed to be the target.
- :param *args: C, X, Y (optional)
- :param **kwargs:
+ Fit contextualized model to data.
+
+ Args:
+ C (np.ndarray): Context array of shape (n_samples, n_context_features)
+ X (np.ndarray): Predictor array of shape (n_samples, n_features)
+ Y (np.ndarray, optional): Target array of shape (n_samples, n_targets). Defaults to None, in which case X is used as the target, as in Contextualized Networks.
+ max_epochs (int, optional): Maximum number of epochs to train for. Defaults to 1.
+ learning_rate (float, optional): Learning rate for optimizer. Defaults to 1e-3.
+ val_split (float, optional): Proportion of data to use for validation and early stopping. Defaults to 0.2.
+ n_bootstraps (int, optional): Number of bootstraps to use. Defaults to 1.
+ train_batch_size (int, optional): Batch size for training. Defaults to 1.
+ val_batch_size (int, optional): Batch size for validation. Defaults to 16.
+ test_batch_size (int, optional): Batch size for testing. Defaults to 16.
+ es_patience (int, optional): Number of epochs to wait before early stopping. Defaults to 1.
+ es_monitor (str, optional): Metric to monitor for early stopping. Defaults to "val_loss".
+ es_mode (str, optional): Mode for early stopping. Defaults to "min".
+ es_verbose (bool, optional): Whether to print early stopping updates. Defaults to False.
"""
self.models = []
self.trainers = []
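The keyword arguments documented above map directly onto the training loop. A hedged sketch of a typical call with explicit validation and early-stopping settings, again using the illustrative `ContextualizedRegressor` subclass:

```python
import numpy as np
from contextualized.easy import ContextualizedRegressor  # assumed concrete subclass

C = np.random.normal(size=(200, 4))
X = np.random.normal(size=(200, 3))
Y = np.random.normal(size=(200, 1))

model = ContextualizedRegressor(n_bootstraps=2)
model.fit(
    C, X, Y,
    max_epochs=50,
    learning_rate=1e-3,
    val_split=0.2,         # hold out 20% of samples for validation
    train_batch_size=32,
    es_patience=5,         # stop after 5 epochs without improvement
    es_monitor="val_loss",
)
```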
@@ -469,7 +529,9 @@ def fit(self, *args, **kwargs):
for f in organized_kwargs["trainer"]["callback_constructors"]
]
del my_trainer_kwargs["callback_constructors"]
- trainer = self.trainer_constructor(**my_trainer_kwargs, enable_progress_bar=False)
+ trainer = self.trainer_constructor(
+ **my_trainer_kwargs, enable_progress_bar=False
+ )
checkpoint_callback = my_trainer_kwargs["callbacks"][1]
os.makedirs(checkpoint_callback.dirpath, exist_ok=True)
try:
diff --git a/contextualized/modules.py b/contextualized/modules.py
index fc69678..a880f26 100644
--- a/contextualized/modules.py
+++ b/contextualized/modules.py
@@ -143,11 +143,7 @@ class Linear(nn.Module):
Linear encoder
"""
- def __init__(
- self,
- input_dim,
- output_dim
- ):
+ def __init__(self, input_dim, output_dim):
super().__init__()
self.linear = MLP(
input_dim, output_dim, width=output_dim, layers=0, activation=None
@@ -158,11 +154,7 @@ def forward(self, X):
return self.linear(X)
-ENCODERS = {
- "mlp": MLP,
- "ngam": NGAM,
- "linear": Linear
-}
+ENCODERS = {"mlp": MLP, "ngam": NGAM, "linear": Linear}
if __name__ == "__main__":
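Since the condensed ENCODERS registry is what the wrappers consult when resolving encoder_type, here is a quick sketch of selecting and running an encoder by name; dimensions are illustrative:

```python
import torch
from contextualized.modules import ENCODERS

# Resolve an encoder class by name, as the wrappers do internally for encoder_type.
encoder_cls = ENCODERS["linear"]
encoder = encoder_cls(input_dim=5, output_dim=8)  # Linear signature as defined in this file

out = encoder(torch.randn(10, 5))
print(out.shape)  # expected: torch.Size([10, 8])
```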
diff --git a/contextualized_logo.png b/contextualized_logo.png
deleted file mode 100644
index 5acce57..0000000
Binary files a/contextualized_logo.png and /dev/null differ
diff --git a/dev_requirements.txt b/dev_requirements.txt
new file mode 100644
index 0000000..7071e1b
--- /dev/null
+++ b/dev_requirements.txt
@@ -0,0 +1,22 @@
+# This file specifies extra dependencies for the development of Contextualized ML
+
+# Style
+black==23.12.1
+pylint==2.15.5
+pylint-badge @ git+https://github.com/blengerich/pylint-badge
+
+# Documentation
+jupyter-book==0.15.1
+myst-parser==0.18.1
+Sphinx==5.0.2
+sphinx-book-theme==1.0.1
+sphinx-jupyterbook-latex==0.5.2
+sphinx-rtd-theme==2.0.0
+sphinx_external_toc==0.3.1
+
+# Testing
+pytest==7.4.3
+
+# Packaging
+toml==0.10.2
+tomli==2.0.1
diff --git a/docs/_config.yml b/docs/_config.yml
index 2b7a4f1..631c484 100644
--- a/docs/_config.yml
+++ b/docs/_config.yml
@@ -12,10 +12,11 @@ url: "https://contextualized.ml"
# Force re-execution of notebooks on each build.
# See https://jupyterbook.org/content/execute.html
execute:
- execute_notebooks: force
- timeout: 300
+ execute_notebooks: 'off'
+ # execute_notebooks: force
+ # timeout: 300
-only_build_toc_files: true
+only_build_toc_files: false
# Define the name of the latex output file for PDF builds
latex:
@@ -37,3 +38,27 @@ repository:
html:
use_issues_button: true
use_repository_button: true
+
+# https://jupyterbook.org/en/stable/advanced/developers.html
+sphinx:
+ extra_extensions:
+ - 'sphinx.ext.autodoc'
+ - 'sphinx.ext.napoleon'
+ - 'sphinx.ext.viewcode'
+ - 'sphinx.ext.autosummary'
+ config:
+ add_module_names: False
+ autosummary_generate: True
+ html_theme: sphinx_book_theme
+ # templates_path: ['_templates']
+ # - sphinx.ext.duration
+# - sphinx.ext.doctest
+# - sphinx.ext.intersphinx
+# - nbsphinx
+# - myst_parser
+ # 'sphinx.ext.doctest',
+ # 'sphinx.ext.autodoc',
+ # 'sphinx.ext.autosummary',
+ # 'sphinx.ext.intersphinx',
+ # 'nbsphinx',
+ # 'myst_parser',
\ No newline at end of file
diff --git a/docs/_toc.yml b/docs/_toc.yml
index 16307a3..87accbe 100644
--- a/docs/_toc.yml
+++ b/docs/_toc.yml
@@ -25,6 +25,8 @@ parts:
- caption: Demos
chapters:
- file: demos/custom_models
- - file: demos/robust-outliers
- file: demos/benefits
-
+ - caption: API Reference
+ chapters:
+ - file: source/easy
+ - file: source/analysis
\ No newline at end of file
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 0000000..687060f
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,37 @@
+###############################################################################
+# Auto-generated by `jupyter-book config`
+# If you wish to continue using _config.yml, make edits to that file and
+# re-generate this one.
+###############################################################################
+add_module_names = False
+author = 'Contextualized.ML Team'
+autosummary_generate = True
+bibtex_bibfiles = ['references.bib']
+comments_config = {'hypothesis': False, 'utterances': False}
+copyright = '2023'
+exclude_patterns = ['**.ipynb_checkpoints', '.DS_Store', 'Thumbs.db', '_build']
+extensions = ['sphinx_togglebutton', 'sphinx_copybutton', 'myst_nb', 'jupyter_book', 'sphinx_thebe', 'sphinx_comments', 'sphinx_external_toc', 'sphinx.ext.intersphinx', 'sphinx_design', 'sphinx_book_theme', 'sphinx.ext.autodoc', 'sphinx.ext.napoleon', 'sphinx.ext.viewcode', 'sphinx.ext.autosummary', 'sphinxcontrib.bibtex', 'sphinx_jupyterbook_latex']
+external_toc_exclude_missing = False
+external_toc_path = '_toc.yml'
+html_baseurl = ''
+html_favicon = ''
+html_logo = 'logo.png'
+html_sourcelink_suffix = ''
+html_theme = 'sphinx_book_theme'
+html_theme_options = {'search_bar_text': 'Search this book...', 'launch_buttons': {'notebook_interface': 'classic', 'binderhub_url': '', 'jupyterhub_url': '', 'thebe': False, 'colab_url': ''}, 'path_to_docs': '', 'repository_url': 'https://github.com/cnellington/contextualized', 'repository_branch': 'master', 'extra_footer': '', 'home_page_in_toc': True, 'announcement': '', 'analytics': {'google_analytics_id': ''}, 'use_repository_button': True, 'use_edit_page_button': False, 'use_issues_button': True}
+html_title = 'Contextualized.ML Documentation'
+latex_engine = 'pdflatex'
+myst_enable_extensions = ['colon_fence', 'dollarmath', 'linkify', 'substitution', 'tasklist']
+myst_url_schemes = ['mailto', 'http', 'https']
+nb_execution_allow_errors = False
+nb_execution_cache_path = ''
+nb_execution_excludepatterns = []
+nb_execution_in_temp = False
+nb_execution_mode = 'off'
+nb_execution_timeout = 30
+nb_output_stderr = 'show'
+numfig = True
+pygments_style = 'sphinx'
+suppress_warnings = ['myst.domains']
+use_jupyterbook_latex = True
+use_multitoc_numbering = True
diff --git a/docs/models/easy_bayesian_networks_factors.ipynb b/docs/models/easy_bayesian_networks_factors.ipynb
index 54880e2..9cd0277 100644
--- a/docs/models/easy_bayesian_networks_factors.ipynb
+++ b/docs/models/easy_bayesian_networks_factors.ipynb
@@ -5,7 +5,7 @@
"id": "6e32bc2f",
"metadata": {},
"source": [
- "# Contextualized Bayesian Networks"
+ "# Low-dimensional Contextualized Bayesian Networks"
]
},
{
@@ -259,7 +259,7 @@
"outputs": [
{
"data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkAAAAGwCAYAAABB4NqyAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/av/WaAAAACXBIWXMAAA9hAAAPYQGoP6dpAABMRUlEQVR4nO3deVxU9eI+8GcYYFiUkX0dWcRdAUVB1HJD0czy3luZ326i2eqSXlqu9LtX225qt0VTUiu36lbWLe1WhikIpqIoSi6pySYoq2wDKOuc3x/IkRFQQIYzy/N+veZ1m8OZ42fOneLxmc/5HJkgCAKIiIiITIiZ1AMgIiIi6m4MQERERGRyGICIiIjI5DAAERERkclhACIiIiKTwwBEREREJocBiIiIiEyOudQD0EcajQa5ubno2bMnZDKZ1MMhIiKidhAEARUVFfDw8ICZ2e07HgagVuTm5kKlUkk9DCIiIuqEnJwceHl53XYfBqBW9OzZE0DjCbSzs5N4NERERNQearUaKpVK/D1+OwxArWj62svOzo4BiIiIyMC0Z/oKJ0ETERGRyWEAIiIiIpPDAEREREQmhwGIiIiITA4DEBEREZkcBiAiIiIyOQxAREREZHIYgIiIiMjkMAARERGRyWEAIiIiIpPDAEREREQmhwGIiIiITA4DEBEREXWbypp6HMsqkXoYDEBERETUfbYfzsLDG5MQ/d1pScfBAERERETdorKmHh//mgEACPV1kHQsDEBERETULbYfzkLZtTr4OdtiRqCHpGNhACIiIiKdq6iuE9ufJZP6Qm4mk3Q8DEBERESkc58mXRLbn/sDpG1/AAYgIiIi0jF9a38ABiAiIiLSsaa5P330pP0BGICIiIhIhxrbn0wAwPN60v4ADEBERESkQ9sPZ6H8un61PwADEBEREemIvrY/AAMQERER6ci2Q43tj79LD71qfwAGICIiItIBdXUdPjmon+0PwABEREREOrC9Wfszfai71MNpQdIAdODAAcyYMQMeHh6QyWTYtWvXbfefO3cuZDJZi8fgwYPFfV599dUWPx8wYICO3wkRERE1Uevhuj+3kjQAVVVVITAwEDExMe3af+3atcjLyxMfOTk5cHBwwMMPP6y13+DBg7X2O3jwoC6GT0RERK3YdigL6up69HXpgfv0sP0BAHMp//Bp06Zh2rRp7d5fqVRCqVSKz3ft2oXS0lLMmzdPaz9zc3O4ubl12TiJiIiofdTVdfjkRvujj3N/mhj0HKDNmzcjPDwc3t7eWtsvXrwIDw8P+Pn54bHHHkN2dvZtj1NTUwO1Wq31ICIioo4zhPYHMOAAlJubi59//hlPPvmk1vbQ0FBs27YNsbGx2LBhAzIzM3HPPfegoqKizWOtXLlSbJeUSiVUKpWuh09ERGR0yq/fbH+WhOtv+wMYcADavn07evXqhZkzZ2ptnzZtGh5++GEEBAQgIiICu3fvRllZGb7++us2jxUdHY3y8nLxkZOTo+PRExERGR+t9meI/rY/gMRzgDpLEARs2bIFjz/+OCwtLW+7b69evdCvXz+kpaW1uY9CoYBCoejqYRIREZmM8ut12HzwZvtjpsftD2CgDVBiYiLS0tIwf/78O+5bWVmJ9PR0uLvrdxIlIiIyZFsPZUJdXY9+rvrf/gASB6DKykqkpqYiNTUVAJCZmYnU1FRx0nJ0dDTmzJnT4nWbN29GaGgohgwZ0uJnL774IhITE5GVlYXDhw/jT3/6E+RyOWbPnq3T90JERGSqGtufxlWfl0zqp/ftDyDxV2DHjx/HhAkTxOdRUVEAgMjISGzbtg15eXktruAqLy/Ht99+i7Vr17Z6zMuXL2P27NkoLi6Gs7Mzxo4diyNHjsDZ2Vl3b4SIiMiEbT2UiYob7c+0IYaxDI1MEARB6kHoG7VaDaVSifLyctjZ2Uk9HCIiIr1Vfr0OY1fHo6K6HjH/NxzTA6T7+qsjv78Ncg4QERER6YctBxvbn/6uPQ2m/QEYgIiIiKiTyq/XYcuhG3N/DODKr+YYgIiIiKhTmrc/UwcbTvsDMAARERFRJxhy+wMwABEREVEnbL7R/gxwM7z2B2AAIiIiog4qv1aHreK6P4bX/gAMQERERNRBmw9loqKmsf2JMMD2B2AAIiIiog5o3v4sNcC5P00YgIiIiKjdNh/MENufKYMMs/0BGICIiIioncqu1WLroSwAht3+AAxARERE1E5bDmYaRfsDMAARERFRO5Rdq8UWsf0xjDu+3w4DEBEREd3R5oOZqKypx0B3O0wZ5Cr1cO4aAxARERHdVvO5P4a67s+tGICIiIjotj751bjaH4ABiIiIiG6jtKoW2w5nATD8K7+aYwAiIiKiNjXN/RlkRO0PwABEREREbWje/iwJ7wuZzDjaH4ABiIiIiNrwycEMo2x/AAYgIiIiakVpVS22NVv12ZjaH4ABiIiIiFrxycEMVNU2YJC7HSYbWfsDMAARERHRLUqMvP0BGICIiIjoFp/82tj+DPYwzvYHYAAiIiKiZkqqarFdXPenn1G2PwADEBERETXT1P4M8bRD+EAXqYejMwxAREREBOCW9meS8bY/AAMQERER3fBxs/ZnkhG3PwADEBEREcG02h+AAYiIiIjQ2P5cq23AUE+l0bc/AAMQERGRySuurGl25ZdxrvtzKwYgIiIiE/fxr5li+zNxgPG3PwADEBERkUkrrqzBp0lZAEyn/QEYgIiIiEzaRzfm/gR4mU77AzAAERERmaziyhp8evgSANNqfwAGICIiIpP10a8ZuF7X2P5M6G867Q/AAERERGSSTLn9ARiAiIiITNJHBxrbn0ATbH8AiQPQgQMHMGPGDHh4eEAmk2HXrl233T8hIQEymazFIz8/X2u/mJgY+Pj4wMrKCqGhoUhOTtbhuyAiIjIsVytr8GlSU/tj/Ks+t0bSAFRVVYXAwEDExMR06HUXLlxAXl6e+HBxuZlcd+zYgaioKKxYsQInTpxAYGAgIiIiUFhY2NXDJyIiMkgfN2t/xvd3lno4kjCX8g+fNm0apk2b1uHXubi4oFevXq3+7L333sNTTz2FefPmAQA2btyIn376CVu2bMGyZcvuZrhEREQGj+1PI4OcAxQUFAR3d3dMnjwZhw4dErfX1tYiJSUF4eHh4jYzMzOEh4cjKSmpzePV1NRArVZrPYiIiIyROPdH1ctk2x/AwAKQu7s7Nm7ciG+//RbffvstVCoVxo8fjxMnTgAArl69ioaGBri6umq9ztXVtcU8oeZWrlwJpVIpPlQqlU7fBxERkRSumuiqz62R9Cuwjurfvz/69+8vPh89ejTS09Px/vvv47PPPuv0caOjoxEVFSU+V6vVDEFERGR0PjqQgeo6DYJUvTC+n+m2P4CBBaDWhISE4ODBgwAAJycnyOVyFBQUaO1TUFAANze3No+hUCigUCh0Ok4iIiIpFVWw/WnOoL4Ca01qairc3d0BAJaWlggODkZcXJz4c41Gg7i4OISFhUk1RCIiIsl9dCBdbH/GmXj7A0jcAFVWViItLU18npm
ZidTUVDg4OKB3796Ijo7GlStX8OmnnwIA1qxZA19fXwwePBjV1dX45JNPEB8fj19++UU8RlRUFCIjIzFixAiEhIRgzZo1qKqqEq8KIyIiMjVFFTX47IjprvrcGkkD0PHjxzFhwgTxedM8nMjISGzbtg15eXnIzs4Wf15bW4sXXngBV65cgY2NDQICArBv3z6tY8yaNQtFRUVYvnw58vPzERQUhNjY2BYTo4mIiEzFpkS2P7eSCYIgSD0IfaNWq6FUKlFeXg47Ozuph0NERNRphRXVuPft/aiu02D7EyFGHYA68vvb4OcAERERUds+Smy88mtY7164t6+T1MPRGwxARERERqqwohqfH+Wqz61hACIiIjJSm260P8PZ/rTAAERERGSECiuq8fkRtj9tYQAiIiIyQpsSM1BT39j+3MP2pwUGICIiIiPD9ufOGICIiIiMzMaExvYn2Nue7U8bGICIiIiMSKG6Gv85ylWf74QBiIiIyIhsTLzZ/oz1Z/vTFgYgIiIiI8H2p/0YgIiIiIzEhsR01NRrMILtzx0xABERERmBQnU1vjjaeANxXvl1ZwxARERERqB5+zPG31Hq4eg9BiAiIiIDV6Cuxn9utD9/m8z2pz0YgIiIiAzchoR01NZrMNLHHqP7sP1pDwYgIiIiA1agrsYXyZz701EMQERERAaM7U/nMAAREREZqPzym+3P39j+dAgDEBERkYHamNjY/oT4OCCM7U+HMAAREREZoObtD1d97jgGICIiIgO0ISGtsf3xZfvTGQxAREREBia/vBpfJucAYPvTWQxAREREBmZDQhpqG260P35sfzqDAYiIiMiA5JVfZ/vTBRiAiIiIDMiGhHTUNmgQ6uuA0X14x/fOYgAiIiIyEHnl1/GV2P70k3g0ho0BiIiIyEA0b3945dfdYQAiIiIyALllbH+6EgMQERGRAWhqf0b5sf3pCgxAREREei637Dp2HGtsf5ZMYvvTFRiAiIiI9Bzbn67HAERERKTHmrc/nPvTdRiAiIiI9NiHN1Z9DvNzxCiu+txlGICIiIj0lNbcn/C+Eo/GuDAAERER6amY/WmoaxDY/ugAAxAREZEeulJ2HV8fv3nPL+paDEBERER66MMb7c/oPo4IZfvT5SQNQAcOHMCMGTPg4eEBmUyGXbt23Xb/7777DpMnT4azszPs7OwQFhaGPXv2aO3z6quvQiaTaT0GDBigw3dBRETUtbTbH175pQuSBqCqqioEBgYiJiamXfsfOHAAkydPxu7du5GSkoIJEyZgxowZOHnypNZ+gwcPRl5envg4ePCgLoZPRESkE01zf8b4OyLE10Hq4Rglcyn/8GnTpmHatGnt3n/NmjVaz9966y18//33+OGHHzBs2DBxu7m5Odzc3Np93JqaGtTU1IjP1Wp1u19LRETUlS6XXsM3x7nqs64Z9BwgjUaDiooKODhop+OLFy/Cw8MDfn5+eOyxx5CdnX3b46xcuRJKpVJ8qFQqXQ6biIioTR8mpLP96QYGHYDeeecdVFZW4pFHHhG3hYaGYtu2bYiNjcWGDRuQmZmJe+65BxUVFW0eJzo6GuXl5eIjJyenO4ZPRESkpXn7w7k/uiXpV2B344svvsBrr72G77//Hi4uLuL25l+pBQQEIDQ0FN7e3vj6668xf/78Vo+lUCigUCh0PmYiIqLbidnf2P6M9XfCSB+2P7pkkAHoq6++wpNPPolvvvkG4eHht923V69e6NevH9LS0rppdERERB2nNfeH6/7onMF9Bfbll19i3rx5+PLLLzF9+vQ77l9ZWYn09HS4u7t3w+iIiIg6J2Z/Ouo1bH+6i6QNUGVlpVYzk5mZidTUVDg4OKB3796Ijo7GlStX8OmnnwJo/NorMjISa9euRWhoKPLz8wEA1tbWUCqVAIAXX3wRM2bMgLe3N3Jzc7FixQrI5XLMnj27+98gERFRO+SUNJ/7w/anO0jaAB0/fhzDhg0TL2GPiorCsGHDsHz5cgBAXl6e1hVcH330Eerr67Fw4UK4u7uLjyVLloj7XL58GbNnz0b//v3xyCOPwNHREUeOHIGzs3P3vjkiIqJ2+jAhDfUaAff0dcIItj/dQiYIgiD1IPSNWq2GUqlEeXk57OzspB4OEREZsZySa5jwTgLqNQL++2wYA9Bd6Mjvb4ObA0RERGRM2P5IgwGIiIhIIo1zfy4D4Nyf7sYAREREJJGY/Tfbn2Bvtj/diQGIiIhIAjkl1/DfFLY/UmEAIiIiksD6eLY/UmIAIiIi6mbZxdfw7Ymm9of3/JICAxAREVE3a5r7c28/ZwR720s9HJPEAERERNSNtNsfzv2RCgMQERFRN1q//yLqNQLG9XPG8N5sf6TCAERERNRNGtufKwB4x3epMQARERF1k/X7L6KB7Y9eYAAiIiLqBpeKq8T2h3N/pMcARERE1A3Wx6ehQSNgfH9nDGP7IzkGICIiIh27VFyF707emPszie2PPmAAIiIi0jG2P/qHAYiIiEiHsq7ebH+46rP+YAAiIiLSofX7G9ufCf2dEaTqJfVw6AYGICIiIh3JulqFnU1zf9j+6BUGICIiIh1ZF8/2R18xABEREelA1tUq7Erl3B99xQBERESkA03tz8QBLghk+6N3GICIiIi6WPP2h+v+6CcGICIioi72QXzjPb8msf3RWwxAREREXSjzahV2neQd3/UdAxAREVEXWhd/ERoBmDTABQFevaQeDrWBAYiIiKiLsP0xHAxAREREXWRdXGP7Ez6Q7Y++YwAiIiLqAhlFlc2u/OK6P/qOAYiIiKgLrI9PE9ufoV5KqYdDd8AAREREdJfS2f4YHAYgIiKiu3Sz/XFl+2MgGICIiIjuQnpRJb4X7/nFK78MBQMQERHRXWje/gzxZPtjKBiAiIiIOontj+FiACIiIuqkpnV/Jg9i+2NoGICIiIg6Ia2wEv/7LRcA7/huiDocgOrq6mBubo4zZ87c9R9+4MABzJgxAx4eHpDJZNi1a9cdX5OQkIDhw4dDoVDA398f27Zta7FPTEwMfHx8YGVlhdDQUCQnJ9/1WImIiJpruucX2x/D1OEAZGFhgd69e6OhoeGu//CqqioEBgYiJiamXftnZmZi+vTpmDBhAlJTU7F06VI8+eST2LNnj7jPjh07EBUVhRUrVuDEiRMIDAxEREQECgsL73q8REREANsfYyATBEHo6Is2b96M7777Dp999hkcHBy6ZiAyGXbu3ImZM2e2uc/f//53/PTTT1rt06OPPoqysjLExsYCAEJDQzFy5EisX78eAKDRaKBSqbB48WIsW7asXWNRq9VQKpUoLy+HnZ1d598UEREZpSVfncT3qbmYMsgVH80ZIfVw6IaO/P4278wfsH79eqSlpcHDwwPe3t6wtbXV+vmJEyc6c9g7SkpKQnh4uNa2iIgILF26FABQW1uLlJQUREdHiz83MzNDeHg4kpKS2jxuTU0NampqxOdqtbprB05EREZDq/3hlV8Gq1MB6HYtjS7l5+fD1dVVa5urqyvUajWuX7+O0tJSNDQ0tLrP+fPn2zzuypUr8d
prr+lkzEREZFw+iLsIQQAiBrtisAfn/hiqTgWgFStWdPU4JBUdHY2oqCjxuVqthkqlknBERESkj9IKK/DDqcb253nO/TFonQpATVJSUnDu3DkAwODBgzFs2LAuGVRb3NzcUFBQoLWtoKAAdnZ2sLa2hlwuh1wub3UfNze3No+rUCigUCh0MmYiIjIeH8Slsf0xEp1aB6iwsBATJ07EyJEj8fzzz+P5559HcHAwJk2ahKKioq4eoygsLAxxcXFa2/bu3YuwsDAAgKWlJYKDg7X20Wg0iIuLE/chIiLqjIsFN9sf3vHd8HUqAC1evBgVFRU4e/YsSkpKUFJSgjNnzkCtVuP5559v93EqKyuRmpqK1NRUAI2XuaempiI7OxtA41dTc+bMEfd/9tlnkZGRgZdffhnnz5/Hhx9+iK+//hp/+9vfxH2ioqLw8ccfY/v27Th37hyee+45VFVVYd68eZ15q0RERACAD+Ib25+pg90wyINXCBu6Tn0FFhsbi3379mHgwIHitkGDBiEmJgZTpkxp93GOHz+OCRMmiM+b5uFERkZi27ZtyMvLE8MQAPj6+uKnn37C3/72N6xduxZeXl745JNPEBERIe4za9YsFBUVYfny5cjPz0dQUBBiY2NbTIwmIiJqr4sFFfiRc3+MSqfWAerZsyd+/fVXBAUFaW0/efIkxo0bZ/CXkXMdICIiam7xlyfxw2+5mDrYDRsfD5Z6ONSGjvz+7tRXYBMnTsSSJUuQm5srbrty5Qr+9re/YdKkSZ05JBERkV76o1n7w3V/jEenAtD69euhVqvh4+ODPn36oE+fPvD19YVarca6deu6eoxERESSaVr3Z9oQNwx057cCxqJTc4BUKhVOnDiBffv2iQsMDhw4sMUqzURERIbsj4IK/HQ6DwDn/hibDgeguro6WFtbIzU1FZMnT8bkyZN1MS4iIiLJsf0xXpLeDZ6IiEhfNW9/OPfH+HRqDtD/+3//D6+88gpKSkq6ejxERER6Ye2N9ue+oW4Y4Mb2x9gY1N3giYiIusOF/Ars5twfo2ZQd4MnIiLqDh+w/TF6HQ5A9fX1kMlkeOKJJ+Dl5aWLMREREUnmQn6zuT+855fR6vAcIHNzc/z73/9GfX29LsZDREQkqQ/iLgIApg91R3+3nhKPhnSl0ytBJyYmdvVYiIiIJNXU/shknPtj7Do1B2jatGlYtmwZTp8+jeDg4BaToB944IEuGRwREVF3Whv3BwDgPrY/Rq9TN0M1M2u7OJLJZAa/RhBvhkpEZHrO56sxdc2vkMmA2CX3MgAZoI78/u5UA6TRaDo1MCIiIn3VNPeH7Y9p6NAcoPvuuw/l5eXi81WrVqGsrEx8XlxcjEGDBnXZ4IiIiLrDuTw1dp/Oh0wGLOHcH5PQoQC0Z88e1NTUiM/feustrdWg6+vrceHCha4bHRERUTdofuVXP1e2P6agQwHo1ulCnZg+REREpFfO5anx85l8XvllYjp1GTwREZGxYPtjmjoUgGQyGWQyWYttREREhuj33JvtD+f+mJYOXQUmCALmzp0LhUIBAKiursazzz4rrgPUfH4QERGRvmtqf+4P8EBftj8mpUMBKDIyUuv5X//61xb7zJkz5+5GRERE1A1+z1Uj9uyNuT8T/aUeDnWzDgWgrVu36mocRERE3app1We2P6aJk6CJiMjknM0tx56zBTfm/rD9MUUMQEREZHKa5v7MCPCAvwvbH1PEAERERCalefvzPNsfk8UAREREJmXtPrY/xABEREQm5MyVcvzyewFXfSYGICIiMh1Nc38eCPSAv0sPiUdDUmIAIiIik9DU/pjJgMUT2f6YOgYgIiIyCWvZ/lAzDEBERGT0zlwpx94b7c8itj8EBiAiIjIBbH/oVgxARERk1Jq3P4t55RfdwABERERGbc2NdX8eDPJEH2e2P9SIAYiIiIzWmSvl2Heuae4PV32mmxiAiIjIaLH9obYwABERkVE6fflm+7OY7Q/dQi8CUExMDHx8fGBlZYXQ0FAkJye3ue/48eMhk8laPKZPny7uM3fu3BY/nzp1ane8FSIi0hNr4/4AAMwM8oQf2x+6hbnUA9ixYweioqKwceNGhIaGYs2aNYiIiMCFCxfg4uLSYv/vvvsOtbW14vPi4mIEBgbi4Ycf1tpv6tSp2Lp1q/hcoVDo7k0QEZFeaWx/Cjn3h9okeQP03nvv4amnnsK8efMwaNAgbNy4ETY2NtiyZUur+zs4OMDNzU187N27FzY2Ni0CkEKh0NrP3t6+O94OERHpgTX72P7Q7UkagGpra5GSkoLw8HBxm5mZGcLDw5GUlNSuY2zevBmPPvoobG1ttbYnJCTAxcUF/fv3x3PPPYfi4uI2j1FTUwO1Wq31ICIiw3Tqchnizhdy3R+6LUkD0NWrV9HQ0ABXV1et7a6ursjPz7/j65OTk3HmzBk8+eSTWtunTp2KTz/9FHFxcVi9ejUSExMxbdo0NDQ0tHqclStXQqlUig+VStX5N0VERJJae+PKr5nDPOHrZHuHvclUST4H6G5s3rwZQ4cORUhIiNb2Rx99VPznoUOHIiAgAH369EFCQgImTZrU4jjR0dGIiooSn6vVaoYgIiIDpNX+8J5fdBuSNkBOTk6Qy+UoKCjQ2l5QUAA3N7fbvraqqgpfffUV5s+ff8c/x8/PD05OTkhLS2v15wqFAnZ2dloPIiIyPGvY/lA7SRqALC0tERwcjLi4OHGbRqNBXFwcwsLCbvvab775BjU1NfjrX/96xz/n8uXLKC4uhru7+12PmYiI9NNvOWWIP18IuZkMz7P9oTuQ/CqwqKgofPzxx9i+fTvOnTuH5557DlVVVZg3bx4AYM6cOYiOjm7xus2bN2PmzJlwdHTU2l5ZWYmXXnoJR44cQVZWFuLi4vDggw/C398fERER3fKeiIio+zXd8X1mkCd82P7QHUg+B2jWrFkoKirC8uXLkZ+fj6CgIMTGxooTo7Ozs2Fmpp3TLly4gIMHD+KXX35pcTy5XI5Tp05h+/btKCsrg4eHB6ZMmYI33niDawERERmp1GbtD1d9pvaQCYIgSD0IfaNWq6FUKlFeXs75QEREBmDe1mTsv1CEh4K98M7DgVIPhyTSkd/fkn8FRkREdDdSc8qw/0IR5GYyLJrA9ofahwGIiIgM2tobqz7/aRjn/lD7MQAREZHBOpldKrY/nPtDHcEAREREBqvpyq8/D/OEtyPbH2o/BiAiIjJIJ7NLkdA094ftD3UQAxARERkktj90NxiAiIjI4Jxo1v7wnl/UGQxARERkcJru+P6X4Z7o7Wgj8WjIEDEAERGRQTmRXYrEP5rW/WH7Q53DAERERAZlDdsf6gIMQEREZDBSLpXiwB9FMGf7Q3eJAYiIiAxG05VffxnuxfaH7goDEBERGQSt9ofr/tBdYgAiIiKDsObGPb8eCvaCyoHtD90dBiAiItJ7KZdK8OvFqzA3k2Eh7/hOXYABiIiI9F7TlV9sf6irMAAREZFeY/tDusAAREREeq2p/Xl4BNsf6joMQEREpLeOZ91sfxaMZ/tDXYcBi
IiI9FbTuj9sf6irMQAREZFeYvtDusQAREREeunm3B8V2x/qcgxARESkd45lleBgWtOVX32kHg4ZIQYgIiLSO02rPj88QgUve7Y/1PUYgIiISK8kZ5bgUFoxLORsf0h3GICIiEivrI1j+0O6xwBERER6o3n7s2A82x/SHQYgIiLSG5z7Q92FAYiIiPTC0YxiHE5vmvvDdX9ItxiAiIhILzSt+vzICBU8e1lLPBoydgxAREQkuebtzwK2P9QNGICIiEhyTas+zxrJ9oe6BwMQERFJ6khGMZIymq78YvtD3YMBiIiIJLW2WfvjwfaHugkDEBERSaap/bGUm7H9oW7FAERERJJpWveH7Q91NwYgIiKSRFJ6MY5klMBSbobnuOozdTO9CEAxMTHw8fGBlZUVQkNDkZyc3Oa+27Ztg0wm03pYWVlp7SMIApYvXw53d3dYW1sjPDwcFy9e1PXbICKiDmi65xfbH5KC5AFox44diIqKwooVK3DixAkEBgYiIiIChYWFbb7Gzs4OeXl54uPSpUtaP3/77bfxwQcfYOPGjTh69ChsbW0RERGB6upqXb8dIiJqh+btzwLe8Z0kIHkAeu+99/DUU09h3rx5GDRoEDZu3AgbGxts2bKlzdfIZDK4ubmJD1dXV/FngiBgzZo1+Mc//oEHH3wQAQEB+PTTT5Gbm4tdu3Z1wzsiIqI7aZr782iICu5Ktj/U/SQNQLW1tUhJSUF4eLi4zczMDOHh4UhKSmrzdZWVlfD29oZKpcKDDz6Is2fPij/LzMxEfn6+1jGVSiVCQ0PbPGZNTQ3UarXWg4iIdCMpvRhHMzn3h6QlaQC6evUqGhoatBocAHB1dUV+fn6rr+nfvz+2bNmC77//Hp9//jk0Gg1Gjx6Ny5cvA4D4uo4cc+XKlVAqleJDpVLd7VsjIqJWCIKA99n+kB6Q/CuwjgoLC8OcOXMQFBSEcePG4bvvvoOzszM2bdrU6WNGR0ejvLxcfOTk5HThiImIqElSRjGSb7Q/XPeHpCRpAHJycoJcLkdBQYHW9oKCAri5ubXrGBYWFhg2bBjS0tIAQHxdR46pUChgZ2en9SAioq4lCIJ4z6/ZISq4Ka3u8Aoi3ZE0AFlaWiI4OBhxcXHiNo1Gg7i4OISFhbXrGA0NDTh9+jTc3d0BAL6+vnBzc9M6plqtxtGjR9t9TCIi6nrN25/n2P6QxMylHkBUVBQiIyMxYsQIhISEYM2aNaiqqsK8efMAAHPmzIGnpydWrlwJAHj99dcxatQo+Pv7o6ysDP/+979x6dIlPPnkkwAarxBbunQp3nzzTfTt2xe+vr745z//CQ8PD8ycOVOqt0lEZNIEQcCavWx/SH9IHoBmzZqFoqIiLF++HPn5+QgKCkJsbKw4iTk7OxtmZjeLqtLSUjz11FPIz8+Hvb09goODcfjwYQwaNEjc5+WXX0ZVVRWefvpplJWVYezYsYiNjW2xYCIREXWPpPRiJGeVwNKc7Q/pB5kgCILUg9A3arUaSqUS5eXlnA9EBqHsWi1kMhmU1hZSD4WoBUEQMGvTESRnlWDuaB+8+sBgqYdERqojv78lb4CIqPPSCivxYUIavk/NhUYQMMDNDqG+Dhjl54AQX0c42FpKPUQiHNZqf7juD+kHBiAiA3Q+X4318Wn46XQemne45/LUOJenxrbDWQCAfq49EOrriFA/B4T6OsK5p0KaAZPJarzyq3Hdn/8L6Q1XO05FIP3AAERkQE5dLsO6+DTs/f3mMg+TB7li0QR/uPeyQnJmCY5mlOBoZjH+KKgUH58dabxfnp+zLUJ9HTHqRiDiRFTStcPpxTiWVcr2h/QOAxCRAUi5VIIP4tKQ+EcRAEAmA+4b6o5FE/wx0P3m99z3B3jg/gAPAEBxZQ2OZZXgSEYJjmaW4Hy+GhlFVcgoqsKXydkAAG9HG4T6OogtkZe9Tfe/OTJabH9In3ESdCs4CZr0gSAISMooxrq4NCRlFAMA5GYyPBjogQUT+sDfpWeHjld2rbaxIcpsbIh+z1VDc8u//Z69rDHKrzEMjfJ1hMrBGjKZrKveEpmYgxev4q+bj0JhboYDL09gACKd4yRoIgMmCAIS/ijC+vg0pFwqBQBYyGX4y3AvPDe+D7wdbTt13F42lpgy2A1TBjeuiK6ursPxrMavzI5kluDMlXJcKbuOb09cxrcnGu+t5660amyI/BwR6usAXydbBiJqF632J5TtD+kfNkCtYANEUtBoBOw9V4D18Wk4faUcAGBpboZHR6rwzLg+8Oyl25tGVtbUI+VSKY5mNN6p+9TlMtQ1aP/nwbmnQgxEo3wd4O/Sg4GIWtW8/fn15QlwYQCibsAGiMiANGgE7D6dh5j9aTifXwEAsLaQ47HQ3nj6Xr9u+8XRQ2GOcf2cMa6fMwDgem0DTmQ3BqIjmSVIzSlDUUUNfjyVhx9P5QEAHG0tEeLrIIai/q49YWbGQGTqbm1/GH5IHzEAEUmkvkGD71NzEZOQhoyiKgCNISRytDeeGOMLxx7SXrJubSnHGH8njPF3AgBU1zUgNadMvMrsRHYpiqtq8fOZfPx8Jh8A0MvGAiN9HG6sReSIge52kDMQmZyDaVdx/FIpFOZmeG4cr/wi/cQARNTNaus1+PbEZXyYkIackusAAKW1BZ4Y44u5o32gtNHP1ZytLOQY5eeIUX6OAPqitl6DU5fLcDSzBEcyipFyqRRl1+qw9/cC8TL9nlbmYiAK9XPEEA87mMslvQcz6VjzO74/FurN9of0FucAtYJzgEgXqusasONYDjYmpiOvvBpA41dIT97jh7+O6o2eVvoZfNqrrkGDM1fKG68yyyjG8axSVNTUa+1jaylHsNgQOWCoZy9YmjMQGZNfLxbh8c3JnPtDkuAcICI9UlVTjy+OZuOjXzNQVFEDAHDpqcAz4/pgdogKNpbG8a+hhdwMw3rbY1hvezw7rg/qGzQ4l1eBo5nFOJJRguTMYqir63HgjyIcuLGekbWFHMHe9mJDFKhSQmEul/idUGcJgoD39zbO/WH7Q/qODVAr2ABRV1BX1+GzpEv45NcMlF6rA9C4zs6z4/vg4WAvWFmY1i/6Bo2A8/lqcQ5RcmaJeF6aKMzNMKx3L3FhxuG97U3uPBmyA38UYc6WG+3P3yfApScDEHUvNkBEEiq7Vosth7Kw7VAm1NWNXwF5O9pg4Xh/zBzmabJf+cjNZBjsocRgDyWeGOsLjUbAxcJKHM0sFkPR1cpaHMloXL0acYCl3AyBKqUYiIK97Y2mMTM2za/8+usob4Yf0ntsgFrBBog642plDT7+NQOfJ11CVW0DAMDfpQcWTfDH/QHunPx7B4IgIL2oSisQFahrtPYxN5NhqNfNQDTC297g504ZC7Y/pA868vubAagVDEDUEfnl1dh0IB1fJmejuk4DABjobofFE/0xdbAb18XpJEEQcKn4WrNAVIIrZde19jGTAUM8leL9zEb6OkBpzUDU3QRBwJ83HMbJ7DLMH+uLf94/SOohkYliALpLDEDUHjkl17AxMR3fHL+M2obG4BOo6oXnJ/pj4gAX
rpCsAzkl18SrzI5mliC75JrWz2UyYKCbHUJv3O0+1NcB9raWEo3WdCT+UYTILcmwsmi85xfbH5IK5wAR6VDm1Sp8uD8NO09eQf2Nu4mG+Dhg8SR/jPV3YvDRIZWDDVQONngo2AsAkFd+Xfy67GhGCTKuVuH3PDV+z1Nj66EsAMAAt57iVWYhvg5wkniBSWOjNfcnlHN/yHCwAWoFGyBqzR8FFYjZn4YffssV76I+1t8Jiyf6I9TPUdrBEQCgUF0t3u3+aEYJLhZWttjH36WH1v3MeKn23Um4UIi5W4/BysIMv748Ec49GTBJOmyAiLrQmSvliNmfJt7uAQAmDXDBwon+GN7bXsKR0a1c7KwwI9ADMwI9ADROTD+WWSKuVn0+vwJphZVIK6zEf45mAwB8nWxvBKLGr808dHzTWWPSfNXnx0d5M/yQQWED1Ao2QAQAJ7NLsT4+DXHnC8VtUwe7YdFEfwzxVEo4Muqs0qpaJGeViF+b/Z6nxq3/BVQ5WIvzh0b5OcLL3ppfa7aB7Q/pGzZARHfhaEYx1sWn4WDaVQCNVxrNCPTAwgn+6OfaU+LR0d2wt7VExGA3RAx2AwCUX6/D8awScWL1mVw1ckquI6fkMv6bchkA4KG0Qqifo/i1mY+jDQMR2P6Q4WMD1Ao2QKZHEAQcTLuKdXFpSM4qAdC45syfhnniufF94OfcQ+IRUneorKnXCkSnLpeLE92buNopECI2RA7o49zDJAPR/guFmMf2h/QMGyCidhIEAfHnC7EuPg2pOWUAGlcffniEF54d1wcqBxtpB0jdqofCHOP7u2B8fxcAwLXaepy4VCZOqk7NKUOBugY//JaLH37LBQA49bBEyI11iEL9HNDPpafRr/3UvP2ZE+bD8EMGiQGITJJGIyD2bD7WxafhXJ4aQON9qP4vtDeevtcP7kpOhCXAxtIcY/s6YWxfJwBAdV0DTmbfDEQnsktxtbIWu0/nY/fpxkny9jYWWoFooJud0QWihD+K8FtOGawszPD0vX5SD4eoUxiAyKTUN2jw46k8xOxPEy+RtrWU469h3nhyrB//Jku3ZWUhR1gfR4T1aVz2oKa+Aacul4sLMx7PKkXptTrsOVuAPWcLAAB2VuZagWiQu51B3xZFEASsuXHH9zlhPlxXiQwWAxCZhLoGDXaeuIIPE9KQVdy4enBPK3PMG+2DeWN8uVowdYrCXI6RPg4Y6eOARWj8nJ2+Ui5eZXY8qxTq6nrsO1eIfecarybsoTDHCB97MRAN9VTCwoACUcKFIvx2uRzWFnK2P2TQGIDIqFXXNeCblMvYmJAu3kfK3sYC88f6Ys5oH9jxRprUhSzkZhje2x7De9vjufF9UN+gwe95ajEQJWeWQF1dj4QLRUi4UAQAsLGUI9jbXrzKLMBLCYW5XOJ30rrmqz7PCfNm+0MGjVeBtYJXgRm+67UN+CI5Gx8dSBfvKO7UQ4Fn7vXD/4X2hq2C2Z+6X4NGwLk8tXiVWXJWCcqu1WntozBvDFFNCzMO690LVhb6EYj2ny/EvG3HYG0hx69/n8AARHqHV4GRyaqsqcdnSZfwya8ZKK6qBQC4K63w7Lg+mDVSpTe/SMg0yc1kGOKpxBBPJeaP9YVGI+CPwgqt+5kVV9UiKaMYSRnFAC7CUm6GIFUvhPo1Lsw4vLc9rC27/3MsCALeZ/tDRoQNUCvYABme8mt12HY4C1sOZaL8euPfqFUO1lgw3h9/Hu6pt18pEDUnCALSiypxJOPmWkSFFTVa+1jIZQjw6iV+ZRbsbY8e3dBoxp8vwBPbjsPaQo6Df58ARwYg0kMd+f3NANQKBiDDUVxZgy2HMvHp4UuoqKkHAPg522LheH88EORhUJNLiW4lCAKyiq+JV5kdzShGbnm11j5NrdKoG/czG+Hj0OVz2wRBwIMxh3DqcjmeGeeH6GkDu/T4RF2FAeguMQDpv0J1NT7+NQOfH8nG9boGAEB/155YNNEf9w11h9zI1l0hAhqDyOXS6zjSFIgyi5FTcl1rHzMZMMjDTryfWYivA3rZ3N1Vjk3tj42lHL++zPaH9BfnAJHRyi27jk2J6fjyWA5q6zUAgKGeSiya6I/JA12NbsE5ouZkMhlUDjZQOdjg4REqAI3/TjTNHzqaWYLMq1U4c0WNM1fU2HwwEzJZ418ORvndDEQdCTC3rvrM8EPGgg1QK9gA6Z/s4mv4MCEN3564jLqGxo/s8N69sHhSX4zv52yS92Iiak2Bulr8uuxIRjHSi6pa7NPXpYd4lVmonwNcelq1eby4cwWYv53tDxkGNkBkNNIKK/Hh/jR8/1suGm7clDLMzxGLJ/ojrI8jgw/RLVztrPBAoAceCPQAABRV1CA58+ZVZhcKKnCxsBIXCyvx+ZFsAICfk61WIGq6FQzbHzJmbIBawQZIeufy1Fi/Pw27T+eh6RM6rp8zFk/0xwgfB2kHR2TASqpqtQLRuXw1bv0t0NvBBqG+DnDqqcCGhHTYWMpx8O8T4cAV00nPGVwDFBMTg3//+9/Iz89HYGAg1q1bh5CQkFb3/fjjj/Hpp5/izJkzAIDg4GC89dZbWvvPnTsX27dv13pdREQEYmNjdfcmqEuculyGdfFp2Pt7gbht8iBXLJ7ojwCvXtINjMhIONhaYuoQN0wd4gagcQmJY1k3AlFmCc5cKUd2yTVkl1wTXxM52ofhh4yO5AFox44diIqKwsaNGxEaGoo1a9YgIiICFy5cgIuLS4v9ExISMHv2bIwePRpWVlZYvXo1pkyZgrNnz8LT01Pcb+rUqdi6dav4XKFgdavPjmeVYF18GhL/aLw9gEwG3DfUHYsm+GOgO1s4Il1R2lggfJArwge5AgAqqutw/FKpuDijXCbD0/fwnl9kfCT/Ciw0NBQjR47E+vXrAQAajQYqlQqLFy/GsmXL7vj6hoYG2NvbY/369ZgzZw6AxgaorKwMu3btatcYampqUFNzc7ExtVoNlUrFr8B0TBAEJKUXY1182o1VbxvXNHkwyAMLxvvD36WHxCMkIiJDYjBfgdXW1iIlJQXR0dHiNjMzM4SHhyMpKaldx7h27Rrq6urg4KA9LyQhIQEuLi6wt7fHxIkT8eabb8LR0bHVY6xcuRKvvfZa598IdYggCEj4owjr49OQcqkUQOPqtg8Fe+HZcX3g7Wgr8QiJiMjYSRqArl69ioaGBri6umptd3V1xfnz59t1jL///e/w8PBAeHi4uG3q1Kn485//DF9fX6Snp+OVV17BtGnTkJSUBLm85S0RoqOjERUVJT5vaoCoa2k0AvaeK8D6+DScvlIOALA0N8PskSo8Pa4PPHtZSzxCIiIyFZLPAbobq1atwldffYWEhARYWd1cx+LRRx8V/3no0KEICAhAnz59kJCQgEmTJrU4jkKh4BwhHWrQCNh9Og8x+9NwPr8CAGBtIcdfR/XGU/f4wcWu7TVIiIiIdEHSAOTk5AS5XI6CggKt7QUFBXBzc7vta9955x2sWrUK+/btQ0BAwG339fPzg5OTE9LS0loNQKQbdQ0a/C81FzEJaci4sRhbD4U5Ikd
7Y/5YP15VQkREkpE0AFlaWiI4OBhxcXGYOXMmgMZJ0HFxcVi0aFGbr3v77bfxr3/9C3v27MGIESPu+OdcvnwZxcXFcHd376qh023U1Dfg25Qr2JCYJt6nSGltgSfG+GLuaB8obbr2Ro1EREQdJflXYFFRUYiMjMSIESMQEhKCNWvWoKqqCvPmzQMAzJkzB56enli5ciUAYPXq1Vi+fDm++OIL+Pj4ID8/HwDQo0cP9OjRA5WVlXjttdfwl7/8BW5ubkhPT8fLL78Mf39/RERESPY+TUF1XQO+Ss7GpgMZyLtxx2pHW0s8eY8fHg/zRg+F5B83IiIiAHoQgGbNmoWioiIsX74c+fn5CAoKQmxsrDgxOjs7G2ZmZuL+GzZsQG1tLR566CGt46xYsQKvvvoq5HI5Tp06he3bt6OsrAweHh6YMmUK3njjDc7z0ZGqmnr85+glfHQgE1crG5cTcLVT4Jl7+2B2SG9YW7aceE5ERCQlydcB0ke8FUb7qKvr8OnhLGw+mInSa3UAAM9e1nhufB88FOwFKwsGHyIi6j4Gsw4QGabSqlpsPZSJrYezUFFdDwDwcbTBggn++NMwT1jIze5wBCIiImkxAFG7FVXU4JODGfg86RKqahsAAH1demDRRH9MH+oOcwYfIiIyEAxAdEf55dXYdCAdXyZno7pOAwAY5G6HxRP9ETHYDWZmMolHSERE1DEMQNSmnJJr2JiYjm+OX0ZtQ2PwCVT1wvMT/TFxgAtkMgYfIiIyTAxA1ELm1SrE7E/DrpNXUK9pnCMf4uOAxZP8MdbficGHiIgMHgMQif4oqMD6+DT8eCoXN3IP7unrhEUT/BHq1/qNZImIiAwRAxDhzJVyrI9PQ+zZfHHbpAEuWDjRH8N720s4MiIiIt1gADJhJ7JLsT4+DfHnC8Vt04a4YeEEfwzxVEo4MiIiIt1iADJBRzKKsT4+DQfTrgIAzGTAjEAPLJzgj36uPSUeHRERke4xAJkIQRDw68WrWB+fhuSsEgCAuZkMfxrmiQUT/OHrZCvxCImIiLoPA5CREwQBcecKsW5/Gn7LKQMAWMrN8PAILzw7rg9UDjbSDpCIiEgCDEBGSqMREHs2H+vi03AuTw0AsLIww/+FeOPpe/3gprSSeIRERETSYQAyMvUNGvx4Kg/r96chrbASAGBrKcfjYT6YP9YXzj0VEo+QiIhIegxARqK2XoNdJ6/gw4Q0ZBVfAwD0tDLHvDG+mDfaB/a2lhKPkIiISH8wABm46roGfJNyGRsT0nGl7DoAwN7GAk/e44fHw7xhZ2Uh8QiJiIj0DwOQgbpe24AvkrPx0YF0FKhrAABOPRR45l4//F9ob9gq+H8tERFRW/hb0sBUVNfhsyOXsPnXTBRX1QIA3JVWeHZcH8waqYKVhVziERIREek/BiADUX6tDlsPZ2LroSyUX68DAKgcrLFgvD/+MtwLluZmEo+QiIjIcDAA6bniyhpsPpiJT5MuobKmHgDg52yLRRP88UCgB8zlDD5EREQdxQCkpwrV1fjoQAb+czQb1+saAAAD3Hpi0UR/TBviDrmZTOIREhERGS4GID1zpew6NiWm46tjOait1wAAhnoqsXiiP8IHusKMwYeIiOiuMQDpiUvFVdiQkI5vT1xGXYMAAAj2tsfiif4Y188ZMhmDDxERUVdhAJJYWmEFPtyfju9/y0WDpjH4hPk5YvEkf4T5OTL4EBER6QADkETO5amxPj4Nu8/kQWjMPRjf3xmLJvhjhI+DtIMjIiIycgxA3ey3nDKsi0/DvnMF4rYpg1yxaKI/Arx6STcwIiIiE8IA1I3e/PF3fHIwEwAgkwHTh7pj4QR/DHS3k3hkREREpoUBqBuF+Dpg6+EsPBjkgQXj/eHv0kPqIREREZkkBqBuFD7QFQkvjofKwUbqoRAREZk0LiPcjczMZAw/REREeoABiIiIiEwOAxARERGZHAYgIiIiMjkMQERERGRyGICIiIjI5DAAERERkclhACIiIiKToxcBKCYmBj4+PrCyskJoaCiSk5Nvu/8333yDAQMGwMrKCkOHDsXu3bu1fi4IApYvXw53d3dYW1sjPDwcFy9e1OVbICIiIgMieQDasWMHoqKisGLFCpw4cQKBgYGIiIhAYWFhq/sfPnwYs2fPxvz583Hy5EnMnDkTM2fOxJkzZ8R93n77bXzwwQfYuHEjjh49CltbW0RERKC6urq73hYRERHpMZkgCIKUAwgNDcXIkSOxfv16AIBGo4FKpcLixYuxbNmyFvvPmjULVVVV+PHHH8Vto0aNQlBQEDZu3AhBEODh4YEXXngBL774IgCgvLwcrq6u2LZtGx599NE7jkmtVkOpVKK8vBx2drxRKRERkSHoyO9vSRug2tpapKSkIDw8XNxmZmaG8PBwJCUltfqapKQkrf0BICIiQtw/MzMT+fn5WvsolUqEhoa2ecyamhqo1WqtBxERERkvSQPQ1atX0dDQAFdXV63trq6uyM/Pb/U1+fn5t92/6X87csyVK1dCqVSKD5VK1an3Q0RERIZB8jlA+iA6Ohrl5eXiIycnR+ohERERkQ6ZS/mHOzk5QS6Xo6CgQGt7QUEB3NzcWn2Nm5vbbfdv+t+CggK4u7tr7RMUFNTqMRUKBRQKhfi8aVoUvwojIiIyHE2/t9szvVnSAGRpaYng4GDExcVh5syZABonQcfFxWHRokWtviYsLAxxcXFYunSpuG3v3r0ICwsDAPj6+sLNzQ1xcXFi4FGr1Th69Ciee+65do2roqICAPhVGBERkQGqqKiAUqm87T6SBiAAiIqKQmRkJEaMGIGQkBCsWbMGVVVVmDdvHgBgzpw58PT0xMqVKwEAS5Yswbhx4/Duu+9i+vTp+Oqrr3D8+HF89NFHAACZTIalS5fizTffRN++feHr64t//vOf8PDwEEPWnXh4eCAnJwc9e/aETCbr0verVquhUqmQk5PDK8zugOeq/Xiu2o/nqv14rtqP56r9dHmuBEFARUUFPDw87riv5AFo1qxZKCoqwvLly5Gfn4+goCDExsaKk5izs7NhZnZzqtLo0aPxxRdf4B//+AdeeeUV9O3bF7t27cKQIUPEfV5++WVUVVXh6aefRllZGcaOHYvY2FhYWVm1a0xmZmbw8vLq2jd6Czs7O/5L0k48V+3Hc9V+PFftx3PVfjxX7aerc3Wn5qeJ5OsAmRquMdR+PFftx3PVfjxX7cdz1X48V+2nL+eKV4ERERGRyWEA6mYKhQIrVqzQuuqMWsdz1X48V+3Hc9V+PFftx3PVfvpyrvgVGBEREZkcNkBERERkchiAiIiIyOQwABEREZHJYQAiIiIik8MA1IUOHDiAGTNmwMPDAzKZDLt27brjaxISEjB8+HAoFAr4+/tj27ZtOh+nPujouUpISIBMJmvxyM/P754BS2jlypUYOXIkevbsCRcXF8ycORMXLly44+u++eYbDBgwAFZWVhg6dCh2797dDaOVVmfO1bZt21p8rtq7aKoh27BhAwICAsTF6MLCwvDzzz/f9jWm+JkCOn6uTPUz1ZpVq1aJd2
i4HSk+WwxAXaiqqgqBgYGIiYlp1/6ZmZmYPn06JkyYgNTUVCxduhRPPvkk9uzZo+ORSq+j56rJhQsXkJeXJz5cXFx0NEL9kZiYiIULF+LIkSPYu3cv6urqMGXKFFRVVbX5msOHD2P27NmYP38+Tp48iZkzZ2LmzJk4c+ZMN468+3XmXAGNK9I2/1xdunSpm0YsHS8vL6xatQopKSk4fvw4Jk6ciAcffBBnz55tdX9T/UwBHT9XgGl+pm517NgxbNq0CQEBAbfdT7LPlkA6AUDYuXPnbfd5+eWXhcGDB2ttmzVrlhAREaHDkemf9pyr/fv3CwCE0tLSbhmTPissLBQACImJiW3u88gjjwjTp0/X2hYaGio888wzuh6eXmnPudq6daugVCq7b1B6zN7eXvjkk09a/Rk/U9pud674mRKEiooKoW/fvsLevXuFcePGCUuWLGlzX6k+W2yAJJSUlITw8HCtbREREUhKSpJoRPovKCgI7u7umDx5Mg4dOiT1cCRRXl4OAHBwcGhzH362GrXnXAFAZWUlvL29oVKp7vg3e2PU0NCAr776ClVVVQgLC2t1H36mGrXnXAH8TC1cuBDTp09v8ZlpjVSfLclvhmrK8vPzxZu+NnF1dYVarcb169dhbW0t0cj0j7u7OzZu3IgRI0agpqYGn3zyCcaPH4+jR49i+PDhUg+v22g0GixduhRjxozRugHwrdr6bJnCnKkm7T1X/fv3x5YtWxAQEIDy8nK88847GD16NM6ePavzmyJL7fTp0wgLC0N1dTV69OiBnTt3YtCgQa3ua+qfqY6cK1P+TAHAV199hRMnTuDYsWPt2l+qzxYDEBmE/v37o3///uLz0aNHIz09He+//z4+++wzCUfWvRYuXIgzZ87g4MGDUg9F77X3XIWFhWn9TX706NEYOHAgNm3ahDfeeEPXw5RU//79kZqaivLycvz3v/9FZGQkEhMT2/zFbso6cq5M+TOVk5ODJUuWYO/evXo/8ZsBSEJubm4oKCjQ2lZQUAA7Ozu2P+0QEhJiUkFg0aJF+PHHH3HgwIE7/i2yrc+Wm5ubLoeoNzpyrm5lYWGBYcOGIS0tTUej0x+Wlpbw9/cHAAQHB+PYsWNYu3YtNm3a1GJfU/9MdeRc3cqUPlMpKSkoLCzUauYbGhpw4MABrF+/HjU1NZDL5VqvkeqzxTlAEgoLC0NcXJzWtr179972e2W6KTU1Fe7u7lIPQ+cEQcCiRYuwc+dOxMfHw9fX946vMdXPVmfO1a0aGhpw+vRpk/hs3Uqj0aCmpqbVn5nqZ6ottztXtzKlz9SkSZNw+vRppKamio8RI0bgscceQ2pqaovwA0j42dLpFGsTU1FRIZw8eVI4efKkAEB47733hJMnTwqXLl0SBEEQli1bJjz++OPi/hkZGYKNjY3w0ksvCefOnRNiYmIEuVwuxMbGSvUWuk1Hz9X7778v7Nq1S7h48aJw+vRpYcmSJYKZmZmwb98+qd5Ct3nuuecEpVIpJCQkCHl5eeLj2rVr4j6PP/64sGzZMvH5oUOHBHNzc+Gdd94Rzp07J6xYsUKwsLAQTp8+LcVb6DadOVevvfaasGfPHiE9PV1ISUkRHn30UcHKyko4e/asFG+h2yxbtkxITEwUMjMzhVOnTgnLli0TZDKZ8MsvvwiCwM9Ucx09V6b6mWrLrVeB6ctniwGoCzVdqn3rIzIyUhAEQYiMjBTGjRvX4jVBQUGCpaWl4OfnJ2zdurXbxy2Fjp6r1atXC3369BGsrKwEBwcHYfz48UJ8fLw0g+9mrZ0nAFqflXHjxonnrsnXX38t9OvXT7C0tBQGDx4s/PTTT907cAl05lwtXbpU6N27t2BpaSm4uroK9913n3DixInuH3w3e+KJJwRvb2/B0tJScHZ2FiZNmiT+QhcEfqaa6+i5MtXPVFtuDUD68tmSCYIg6LZjIiIiItIvnANEREREJocBiIiIiEwOAxARERGZHAYgIiIiMjkMQERERGRyGICIiIjI5DAAERERkclhACIiIiKTwwBERJLIysqCTCZDamqq1EMRnT9/HqNGjYKVlRWCgoKkHg4R6RADEJGJmjt3LmQyGVatWqW1fdeuXZDJZBKNSlorVqyAra0tLly40OLmjE2aztutj6640/f48eOxdOnSuz4OEd0ZAxCRCbOyssLq1atRWloq9VC6TG1tbadfm56ejrFjx8Lb2xuOjo5t7jd16lTk5eVpPTpz53lduZtzQGQqGICITFh4eDjc3NywcuXKNvd59dVXW3wdtGbNGvj4+IjP586di5kzZ+Ktt96Cq6srevXqhddffx319fV46aWX4ODgAC8vL2zdurXF8c+fP4/Ro0fDysoKQ4YMQWJiotbPz5w5g2nTpqFHjx5wdXXF448/jqtXr4o/Hz9+PBYtWoSlS5fCyckJERERrb4PjUaD119/HV5eXlAoFAgKCkJsbKz4c5lMhpSUFLz++uuQyWR49dVX2zwnCoUCbm5uWg+5XI733nsPQ4cOha2tLVQqFRYsWIDKykqt1x46dAjjx4+HjY0N7O3tERERgdLSUsydOxeJiYlYu3at2CplZWUBABITExESEgKFQgF3d3csW7YM9fX1tz0HgiDg1VdfRe/evaFQKODh4YHnn3++zfdEZGoYgIhMmFwux1tvvYV169bh8uXLd3Ws+Ph45Obm4sCBA3jvvfewYsUK3H///bC3t8fRo0fx7LPP4plnnmnx57z00kt44YUXcPLkSYSFhWHGjBkoLi4GAJSVlWHixIkYNmwYjh8/jtjYWBQUFOCRRx7ROsb27dthaWmJQ4cOYePGja2Ob+3atXj33Xfxzjvv4NSpU4iIiMADDzyAixcvAgDy8vIwePBgvPDCC8jLy8OLL77Y4XNgZmaGDz74AGfPnsX27dsRHx+Pl19+Wfx5amoqJk2ahEGDBiEpKQkHDx7EjBkz0NDQgLVr1yIsLAxPPfWU2CqpVCpcuXIF9913H0aOHInffvsNGzZswObNm/Hmm2/e9hx8++23eP/997Fp0yZcvHgRu3btwtChQzv8noiMls7vN09EeikyMlJ48MEHBUEQhFGjRglPPPGEIAiCsHPnTqH5fxpWrFghBAYGar32/fffF7y9vbWO5e3tLTQ0NIjb+vfvL9xzzz3i8/r6esHW1lb48ssvBUEQhMzMTAGAsGrVKnGfuro6wcvLS1i9erUgCILwxhtvCFOmTNH6s3NycgQAwoULFwRBEIRx48YJw4YNu+P79fDwEP71r39pbRs5cqSwYMEC8XlgYKCwYsWK2x4nMjJSkMvlgq2trfh46KGHWt33m2++ERwdHcXns2fPFsaMGdPmsceNGycsWbJEa9srr7wi9O/fX9BoNOK2mJgYoUePHuL5bu0cvPvuu0K/fv2E2tra274fIlPFBoiIsHr1amzfvh3nzp3r9DEGDx4MM7Ob/0lxdXXVahzkcjkcHR1RWFio9bqwsDDxn83NzTFixAhxHL/99hv279+PHj16iI8BAwYAaJyv0yQ4OPi2Y1Or1cjNzcWYMWO0to8ZM
6ZT73nChAlITU0VHx988AEAYN++fZg0aRI8PT3Rs2dPPP744yguLsa1a9cA3GyAOuLcuXMICwvTmpg+ZswYVFZWarVpt56Dhx9+GNevX4efnx+eeuop7Ny5U+trMyJTxwBERLj33nsRERGB6OjoFj8zMzODIAha2+rq6lrsZ2FhofVcJpO1uk2j0bR7XJWVlZgxY4ZW2EhNTcXFixdx7733ivvZ2tq2+5hdwdbWFv7+/uLD3d0dWVlZuP/++xEQEIBvv/0WKSkpiImJAXBzUrK1tbVOx9ScSqXChQsX8OGHH8La2hoLFizAvffe2+r/d0SmiAGIiAAAq1atwg8//ICkpCSt7c7OzsjPz9cKQV25ds+RI0fEf66vr0dKSgoGDhwIABg+fDjOnj0LHx8frcDh7+/fodBjZ2cHDw8PHDp0SGv7oUOHMGjQoC55HykpKdBoNHj33XcxatQo9OvXD7m5uVr7BAQEtHl5PQBYWlqioaFBa9vAgQORlJSkdf4PHTqEnj17wsvL67Zjsra2xowZM/DBBx8gISEBSUlJOH36dCfeHZHxYQAiIgDA0KFD8dhjj4lf5zQZP348ioqK8PbbbyM9PR0xMTH4+eefu+zPjYmJwc6dO3H+/HksXLgQpaWleOKJJwAACxcuRElJCWbPno1jx44hPT0de/bswbx581oEhTt56aWXsHr1auzYsQMXLlzAsmXLkJqaiiVLlnTJ+/D390ddXR3WrVuHjIwMfPbZZy0mZEdHR+PYsWNYsGABTp06hfPnz2PDhg3iVW0+Pj44evQosrKycPXqVWg0GixYsAA5OTlYvHgxzp8/j++//x4rVqxAVFSU1leOt9q2bRs2b96MM2fOICMjA59//jmsra3h7e3dJe+XyNAxABGR6PXXX2/xFdXAgQPx4YcfIiYmBoGBgUhOTu7UFVJtWbVqFVatWoXAwEAcPHgQ//vf/+Dk5AQAYmvT0NCAKVOmYOjQoVi6dCl69ep121/+rXn++ecRFRWFF154AUOHDkVsbCz+97//oW/fvl3yPgIDA/Hee+9h9erVGDJkCP7zn/+0WF6gX79++OWXX/Dbb78hJCQEYWFh+P7772Fubg4AePHFFyGXyzFo0CA4OzsjOzsbnp6e2L17N5KTkxEYGIhnn30W8+fPxz/+8Y/bjqdXr174+OOPMWbMGAQEBGDfvn344Ycfbru+EZEpkQm3frlPREREZOTYABEREZHJYQAiIiIik8MARERERCaHAYiIiIhMDgMQERERmRwGICIiIjI5DEBERERkchiAiIiIyOQwABEREZHJYQAiIiIik8MARERERCbn/wMw5PbYwpNNqgAAAABJRU5ErkJggg==\n",
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkAAAAGwCAYAAABB4NqyAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/av/WaAAAACXBIWXMAAA9hAAAPYQGoP6dpAABMRUlEQVR4nO3deVxU9eI+8GcYYFiUkX0dWcRdAUVB1HJD0czy3luZ326i2eqSXlqu9LtX225qt0VTUiu36lbWLe1WhikIpqIoSi6pySYoq2wDKOuc3x/IkRFQQIYzy/N+veZ1m8OZ42fOneLxmc/5HJkgCAKIiIiITIiZ1AMgIiIi6m4MQERERGRyGICIiIjI5DAAERERkclhACIiIiKTwwBEREREJocBiIiIiEyOudQD0EcajQa5ubno2bMnZDKZ1MMhIiKidhAEARUVFfDw8ICZ2e07HgagVuTm5kKlUkk9DCIiIuqEnJwceHl53XYfBqBW9OzZE0DjCbSzs5N4NERERNQearUaKpVK/D1+OwxArWj62svOzo4BiIiIyMC0Z/oKJ0ETERGRyWEAIiIiIpPDAEREREQmhwGIiIiITA4DEBEREZkcBiAiIiIyOQxAREREZHIYgIiIiMjkMAARERGRyWEAIiIiIpPDAEREREQmhwGIiIiITA4DEBEREXWbypp6HMsqkXoYDEBERETUfbYfzsLDG5MQ/d1pScfBAERERETdorKmHh//mgEACPV1kHQsDEBERETULbYfzkLZtTr4OdtiRqCHpGNhACIiIiKdq6iuE9ufJZP6Qm4mk3Q8DEBERESkc58mXRLbn/sDpG1/AAYgIiIi0jF9a38ABiAiIiLSsaa5P330pP0BGICIiIhIhxrbn0wAwPN60v4ADEBERESkQ9sPZ6H8un61PwADEBEREemIvrY/AAMQERER6ci2Q43tj79LD71qfwAGICIiItIBdXUdPjmon+0PwABEREREOrC9Wfszfai71MNpQdIAdODAAcyYMQMeHh6QyWTYtWvXbfefO3cuZDJZi8fgwYPFfV599dUWPx8wYICO3wkRERE1Uevhuj+3kjQAVVVVITAwEDExMe3af+3atcjLyxMfOTk5cHBwwMMPP6y13+DBg7X2O3jwoC6GT0RERK3YdigL6up69HXpgfv0sP0BAHMp//Bp06Zh2rRp7d5fqVRCqVSKz3ft2oXS0lLMmzdPaz9zc3O4ubl12TiJiIiofdTVdfjkRvujj3N/mhj0HKDNmzcjPDwc3t7eWtsvXrwIDw8P+Pn54bHHHkN2dvZtj1NTUwO1Wq31ICIioo4zhPYHMOAAlJubi59//hlPPvmk1vbQ0FBs27YNsbGx2LBhAzIzM3HPPfegoqKizWOtXLlSbJeUSiVUKpWuh09ERGR0yq/fbH+WhOtv+wMYcADavn07evXqhZkzZ2ptnzZtGh5++GEEBAQgIiICu3fvRllZGb7++us2jxUdHY3y8nLxkZOTo+PRExERGR+t9meI/rY/gMRzgDpLEARs2bIFjz/+OCwtLW+7b69evdCvXz+kpaW1uY9CoYBCoejqYRIREZmM8ut12HzwZvtjpsftD2CgDVBiYiLS0tIwf/78O+5bWVmJ9PR0uLvrdxIlIiIyZFsPZUJdXY9+rvrf/gASB6DKykqkpqYiNTUVAJCZmYnU1FRx0nJ0dDTmzJnT4nWbN29GaGgohgwZ0uJnL774IhITE5GVlYXDhw/jT3/6E+RyOWbPnq3T90JERGSqGtufxlWfl0zqp/ftDyDxV2DHjx/HhAkTxOdRUVEAgMjISGzbtg15eXktruAqLy/Ht99+i7Vr17Z6zMuXL2P27NkoLi6Gs7Mzxo4diyNHjsDZ2Vl3b4SIiMiEbT2UiYob7c+0IYaxDI1MEARB6kHoG7VaDaVSifLyctjZ2Uk9HCIiIr1Vfr0OY1fHo6K6HjH/NxzTA6T7+qsjv78Ncg4QERER6YctBxvbn/6uPQ2m/QEYgIiIiKiTyq/XYcuhG3N/DODKr+YYgIiIiKhTmrc/UwcbTvsDMAARERFRJxhy+wMwABEREVEnbL7R/gxwM7z2B2AAIiIiog4qv1aHreK6P4bX/gAMQERERNRBmw9loqKmsf2JMMD2B2AAIiIiog5o3v4sNcC5P00YgIiIiKjdNh/MENufKYMMs/0BGICIiIioncqu1WLroSwAht3+AAxARERE1E5bDmYaRfsDMAARERFRO5Rdq8UWsf0xjDu+3w4DEBEREd3R5oOZqKypx0B3O0wZ5Cr1cO4aAxARERHdVvO5P4a67s+tGICIiIjotj751bjaH4ABiIiIiG6jtKoW2w5nATD8K7+aYwAiIiKiNjXN/RlkRO0PwABEREREbWje/iwJ7wuZzDjaH4ABiIiIiNrwycEMo2x/AAYgIiIiakVpVS22NVv12ZjaH4ABiIiIiFrxycEMVNU2YJC7HSYbWfsDMAARERHRLUqMvP0BGICIiIjoFp/82tj+DPYwzvYHYAAiIiKiZkqqarFdXPenn1G2PwADEBERETXT1P4M8bRD+EAXqYejMwxAREREBOCW9meS8bY/AAMQERER3fBxs/ZnkhG3PwADEBEREcG02h+AAYiIiIjQ2P5cq23AUE+l0bc/AAMQERGRySuurGl25ZdxrvtzKwYgIiIiE/fxr5li+zNxgPG3PwADEBERkUkrrqzBp0lZAEyn/QEYgIiIiEzaRzfm/gR4mU77AzAAERERmaziyhp8evgSANNqfwAGICIiIpP10a8ZuF7X2P5M6G867Q/AAERERGSSTLn9ARiAiIiITNJHBxrbn0ATbH8AiQPQgQMHMGPGDHh4eEAmk2HXrl233T8hIQEymazFIz8/X2u/mJgY+Pj4wMrKCqGhoUhOTtbhuyAiIjIsVytr8GlSU/tj/Ks+t0bSAFRVVYXAwEDExMR06HUXLlxAXl6e+HBxuZlcd+zYgaioKKxYsQInTpxAYGAgIiIiUFhY2NXDJyIiMkgfN2t/xvd3lno4kjCX8g+fNm0apk2b1uHXubi4oFevXq3+7L333sNTTz2FefPmAQA2btyIn376CVu2bMGyZcvuZrhEREQGj+1PI4OcAxQUFAR3d3dMnjwZhw4dErfX1tYiJSUF4eHh4jYzMzOEh4cjKSmpzePV1NRArVZrPYiIiIyROPdH1ctk2x/AwAKQu7s7Nm7ciG+//RbffvstVCoVxo8fjxMnTgAArl69ioaGBri6umq9ztXVtcU8oeZWrlwJpVIpPlQqlU7fBxERkRSumuiqz62R9Cuwjurfvz/69+8vPh89ejTS09Px/vvv47PPPuv0caOjoxEVFSU+V6vVDEFERGR0PjqQgeo6DYJUvTC+n+m2P4CBBaDWhISE4ODBgwAAJycnyOVyFBQUaO1TUFAANze3No+hUCigUCh0Ok4iIiIpFVWw/WnOoL4Ca01qairc3d0BAJaWlggODkZcXJz4c41Gg7i4OISFhUk1RCIiIsl9dCBdbH/GmXj7A0jcAFVWViItLU18npm
ZidTUVDg4OKB3796Ijo7GlStX8OmnnwIA1qxZA19fXwwePBjV1dX45JNPEB8fj19++UU8RlRUFCIjIzFixAiEhIRgzZo1qKqqEq8KIyIiMjVFFTX47IjprvrcGkkD0PHjxzFhwgTxedM8nMjISGzbtg15eXnIzs4Wf15bW4sXXngBV65cgY2NDQICArBv3z6tY8yaNQtFRUVYvnw58vPzERQUhNjY2BYTo4mIiEzFpkS2P7eSCYIgSD0IfaNWq6FUKlFeXg47Ozuph0NERNRphRXVuPft/aiu02D7EyFGHYA68vvb4OcAERERUds+Smy88mtY7164t6+T1MPRGwxARERERqqwohqfH+Wqz61hACIiIjJSm260P8PZ/rTAAERERGSECiuq8fkRtj9tYQAiIiIyQpsSM1BT39j+3MP2pwUGICIiIiPD9ufOGICIiIiMzMaExvYn2Nue7U8bGICIiIiMSKG6Gv85ylWf74QBiIiIyIhsTLzZ/oz1Z/vTFgYgIiIiI8H2p/0YgIiIiIzEhsR01NRrMILtzx0xABERERmBQnU1vjjaeANxXvl1ZwxARERERqB5+zPG31Hq4eg9BiAiIiIDV6Cuxn9utD9/m8z2pz0YgIiIiAzchoR01NZrMNLHHqP7sP1pDwYgIiIiA1agrsYXyZz701EMQERERAaM7U/nMAAREREZqPzym+3P39j+dAgDEBERkYHamNjY/oT4OCCM7U+HMAAREREZoObtD1d97jgGICIiIgO0ISGtsf3xZfvTGQxAREREBia/vBpfJucAYPvTWQxAREREBmZDQhpqG260P35sfzqDAYiIiMiA5JVfZ/vTBRiAiIiIDMiGhHTUNmgQ6uuA0X14x/fOYgAiIiIyEHnl1/GV2P70k3g0ho0BiIiIyEA0b3945dfdYQAiIiIyALllbH+6EgMQERGRAWhqf0b5sf3pCgxAREREei637Dp2HGtsf5ZMYvvTFRiAiIiI9Bzbn67HAERERKTHmrc/nPvTdRiAiIiI9NiHN1Z9DvNzxCiu+txlGICIiIj0lNbcn/C+Eo/GuDAAERER6amY/WmoaxDY/ugAAxAREZEeulJ2HV8fv3nPL+paDEBERER66MMb7c/oPo4IZfvT5SQNQAcOHMCMGTPg4eEBmUyGXbt23Xb/7777DpMnT4azszPs7OwQFhaGPXv2aO3z6quvQiaTaT0GDBigw3dBRETUtbTbH175pQuSBqCqqioEBgYiJiamXfsfOHAAkydPxu7du5GSkoIJEyZgxowZOHnypNZ+gwcPRl5envg4ePCgLoZPRESkE01zf8b4OyLE10Hq4Rglcyn/8GnTpmHatGnt3n/NmjVaz9966y18//33+OGHHzBs2DBxu7m5Odzc3Np93JqaGtTU1IjP1Wp1u19LRETUlS6XXsM3x7nqs64Z9BwgjUaDiooKODhop+OLFy/Cw8MDfn5+eOyxx5CdnX3b46xcuRJKpVJ8qFQqXQ6biIioTR8mpLP96QYGHYDeeecdVFZW4pFHHhG3hYaGYtu2bYiNjcWGDRuQmZmJe+65BxUVFW0eJzo6GuXl5eIjJyenO4ZPRESkpXn7w7k/uiXpV2B344svvsBrr72G77//Hi4uLuL25l+pBQQEIDQ0FN7e3vj6668xf/78Vo+lUCigUCh0PmYiIqLbidnf2P6M9XfCSB+2P7pkkAHoq6++wpNPPolvvvkG4eHht923V69e6NevH9LS0rppdERERB2nNfeH6/7onMF9Bfbll19i3rx5+PLLLzF9+vQ77l9ZWYn09HS4u7t3w+iIiIg6J2Z/Ouo1bH+6i6QNUGVlpVYzk5mZidTUVDg4OKB3796Ijo7GlStX8OmnnwJo/NorMjISa9euRWhoKPLz8wEA1tbWUCqVAIAXX3wRM2bMgLe3N3Jzc7FixQrI5XLMnj27+98gERFRO+SUNJ/7w/anO0jaAB0/fhzDhg0TL2GPiorCsGHDsHz5cgBAXl6e1hVcH330Eerr67Fw4UK4u7uLjyVLloj7XL58GbNnz0b//v3xyCOPwNHREUeOHIGzs3P3vjkiIqJ2+jAhDfUaAff0dcIItj/dQiYIgiD1IPSNWq2GUqlEeXk57OzspB4OEREZsZySa5jwTgLqNQL++2wYA9Bd6Mjvb4ObA0RERGRM2P5IgwGIiIhIIo1zfy4D4Nyf7sYAREREJJGY/Tfbn2Bvtj/diQGIiIhIAjkl1/DfFLY/UmEAIiIiksD6eLY/UmIAIiIi6mbZxdfw7Ymm9of3/JICAxAREVE3a5r7c28/ZwR720s9HJPEAERERNSNtNsfzv2RCgMQERFRN1q//yLqNQLG9XPG8N5sf6TCAERERNRNGtufKwB4x3epMQARERF1k/X7L6KB7Y9eYAAiIiLqBpeKq8T2h3N/pMcARERE1A3Wx6ehQSNgfH9nDGP7IzkGICIiIh27VFyF707emPszie2PPmAAIiIi0jG2P/qHAYiIiEiHsq7ebH+46rP+YAAiIiLSofX7G9ufCf2dEaTqJfVw6AYGICIiIh3JulqFnU1zf9j+6BUGICIiIh1ZF8/2R18xABEREelA1tUq7Erl3B99xQBERESkA03tz8QBLghk+6N3GICIiIi6WPP2h+v+6CcGICIioi72QXzjPb8msf3RWwxAREREXSjzahV2neQd3/UdAxAREVEXWhd/ERoBmDTABQFevaQeDrWBAYiIiKiLsP0xHAxAREREXWRdXGP7Ez6Q7Y++YwAiIiLqAhlFlc2u/OK6P/qOAYiIiKgLrI9PE9ufoV5KqYdDd8AAREREdJfS2f4YHAYgIiKiu3Sz/XFl+2MgGICIiIjuQnpRJb4X7/nFK78MBQMQERHRXWje/gzxZPtjKBiAiIiIOontj+FiACIiIuqkpnV/Jg9i+2NoGICIiIg6Ia2wEv/7LRcA7/huiDocgOrq6mBubo4zZ87c9R9+4MABzJgxAx4eHpDJZNi1a9cdX5OQkIDhw4dDoVDA398f27Zta7FPTEwMfHx8YGVlhdDQUCQnJ9/1WImIiJpruucX2x/D1OEAZGFhgd69e6OhoeGu//CqqioEBgYiJiamXftnZmZi+vTpmDBhAlJTU7F06VI8+eST2LNnj7jPjh07EBUVhRUrVuDEiRMIDAxEREQECgsL73q8REREANsfYyATBEHo6Is2b96M7777Dp999hkcHBy6ZiAyGXbu3ImZM2e2uc/f//53/PTTT1rt06OPPoqysjLExsYCAEJDQzFy5EisX78eAKDRaKBSqbB48WIsW7asXWNRq9VQKpUoLy+HnZ1d598UEREZpSVfncT3qbmYMsgVH80ZIfVw6IaO/P4278wfsH79eqSlpcHDwwPe3t6wtbXV+vmJEyc6c9g7SkpKQnh4uNa2iIgILF26FABQW1uLlJQUREdHiz83MzNDeHg4kpKS2jxuTU0NampqxOdqtbprB05EREZDq/3hlV8Gq1MB6HYtjS7l5+fD1dVVa5urqyvUajWuX7+O0tJSNDQ0tLrP+fPn2zzuypUr8d
prr+lkzEREZFw+iLsIQQAiBrtisAfn/hiqTgWgFStWdPU4JBUdHY2oqCjxuVqthkqlknBERESkj9IKK/DDqcb253nO/TFonQpATVJSUnDu3DkAwODBgzFs2LAuGVRb3NzcUFBQoLWtoKAAdnZ2sLa2hlwuh1wub3UfNze3No+rUCigUCh0MmYiIjIeH8Slsf0xEp1aB6iwsBATJ07EyJEj8fzzz+P5559HcHAwJk2ahKKioq4eoygsLAxxcXFa2/bu3YuwsDAAgKWlJYKDg7X20Wg0iIuLE/chIiLqjIsFN9sf3vHd8HUqAC1evBgVFRU4e/YsSkpKUFJSgjNnzkCtVuP5559v93EqKyuRmpqK1NRUAI2XuaempiI7OxtA41dTc+bMEfd/9tlnkZGRgZdffhnnz5/Hhx9+iK+//hp/+9vfxH2ioqLw8ccfY/v27Th37hyee+45VFVVYd68eZ15q0RERACAD+Ib25+pg90wyINXCBu6Tn0FFhsbi3379mHgwIHitkGDBiEmJgZTpkxp93GOHz+OCRMmiM+b5uFERkZi27ZtyMvLE8MQAPj6+uKnn37C3/72N6xduxZeXl745JNPEBERIe4za9YsFBUVYfny5cjPz0dQUBBiY2NbTIwmIiJqr4sFFfiRc3+MSqfWAerZsyd+/fVXBAUFaW0/efIkxo0bZ/CXkXMdICIiam7xlyfxw2+5mDrYDRsfD5Z6ONSGjvz+7tRXYBMnTsSSJUuQm5srbrty5Qr+9re/YdKkSZ05JBERkV76o1n7w3V/jEenAtD69euhVqvh4+ODPn36oE+fPvD19YVarca6deu6eoxERESSaVr3Z9oQNwx057cCxqJTc4BUKhVOnDiBffv2iQsMDhw4sMUqzURERIbsj4IK/HQ6DwDn/hibDgeguro6WFtbIzU1FZMnT8bkyZN1MS4iIiLJsf0xXpLeDZ6IiEhfNW9/OPfH+HRqDtD/+3//D6+88gpKSkq6ejxERER6Ye2N9ue+oW4Y4Mb2x9gY1N3giYiIusOF/Ars5twfo2ZQd4MnIiLqDh+w/TF6HQ5A9fX1kMlkeOKJJ+Dl5aWLMREREUnmQn6zuT+855fR6vAcIHNzc/z73/9GfX29LsZDREQkqQ/iLgIApg91R3+3nhKPhnSl0ytBJyYmdvVYiIiIJNXU/shknPtj7Do1B2jatGlYtmwZTp8+jeDg4BaToB944IEuGRwREVF3Whv3BwDgPrY/Rq9TN0M1M2u7OJLJZAa/RhBvhkpEZHrO56sxdc2vkMmA2CX3MgAZoI78/u5UA6TRaDo1MCIiIn3VNPeH7Y9p6NAcoPvuuw/l5eXi81WrVqGsrEx8XlxcjEGDBnXZ4IiIiLrDuTw1dp/Oh0wGLOHcH5PQoQC0Z88e1NTUiM/feustrdWg6+vrceHCha4bHRERUTdofuVXP1e2P6agQwHo1ulCnZg+REREpFfO5anx85l8XvllYjp1GTwREZGxYPtjmjoUgGQyGWQyWYttREREhuj33JvtD+f+mJYOXQUmCALmzp0LhUIBAKiursazzz4rrgPUfH4QERGRvmtqf+4P8EBftj8mpUMBKDIyUuv5X//61xb7zJkz5+5GRERE1A1+z1Uj9uyNuT8T/aUeDnWzDgWgrVu36mocRERE3app1We2P6aJk6CJiMjknM0tx56zBTfm/rD9MUUMQEREZHKa5v7MCPCAvwvbH1PEAERERCalefvzPNsfk8UAREREJmXtPrY/xABEREQm5MyVcvzyewFXfSYGICIiMh1Nc38eCPSAv0sPiUdDUmIAIiIik9DU/pjJgMUT2f6YOgYgIiIyCWvZ/lAzDEBERGT0zlwpx94b7c8itj8EBiAiIjIBbH/oVgxARERk1Jq3P4t55RfdwABERERGbc2NdX8eDPJEH2e2P9SIAYiIiIzWmSvl2Heuae4PV32mmxiAiIjIaLH9obYwABERkVE6fflm+7OY7Q/dQi8CUExMDHx8fGBlZYXQ0FAkJye3ue/48eMhk8laPKZPny7uM3fu3BY/nzp1ane8FSIi0hNr4/4AAMwM8oQf2x+6hbnUA9ixYweioqKwceNGhIaGYs2aNYiIiMCFCxfg4uLSYv/vvvsOtbW14vPi4mIEBgbi4Ycf1tpv6tSp2Lp1q/hcoVDo7k0QEZFeaWx/Cjn3h9okeQP03nvv4amnnsK8efMwaNAgbNy4ETY2NtiyZUur+zs4OMDNzU187N27FzY2Ni0CkEKh0NrP3t6+O94OERHpgTX72P7Q7UkagGpra5GSkoLw8HBxm5mZGcLDw5GUlNSuY2zevBmPPvoobG1ttbYnJCTAxcUF/fv3x3PPPYfi4uI2j1FTUwO1Wq31ICIiw3Tqchnizhdy3R+6LUkD0NWrV9HQ0ABXV1et7a6ursjPz7/j65OTk3HmzBk8+eSTWtunTp2KTz/9FHFxcVi9ejUSExMxbdo0NDQ0tHqclStXQqlUig+VStX5N0VERJJae+PKr5nDPOHrZHuHvclUST4H6G5s3rwZQ4cORUhIiNb2Rx99VPznoUOHIiAgAH369EFCQgImTZrU4jjR0dGIiooSn6vVaoYgIiIDpNX+8J5fdBuSNkBOTk6Qy+UoKCjQ2l5QUAA3N7fbvraqqgpfffUV5s+ff8c/x8/PD05OTkhLS2v15wqFAnZ2dloPIiIyPGvY/lA7SRqALC0tERwcjLi4OHGbRqNBXFwcwsLCbvvab775BjU1NfjrX/96xz/n8uXLKC4uhru7+12PmYiI9NNvOWWIP18IuZkMz7P9oTuQ/CqwqKgofPzxx9i+fTvOnTuH5557DlVVVZg3bx4AYM6cOYiOjm7xus2bN2PmzJlwdHTU2l5ZWYmXXnoJR44cQVZWFuLi4vDggw/C398fERER3fKeiIio+zXd8X1mkCd82P7QHUg+B2jWrFkoKirC8uXLkZ+fj6CgIMTGxooTo7Ozs2Fmpp3TLly4gIMHD+KXX35pcTy5XI5Tp05h+/btKCsrg4eHB6ZMmYI33niDawERERmp1GbtD1d9pvaQCYIgSD0IfaNWq6FUKlFeXs75QEREBmDe1mTsv1CEh4K98M7DgVIPhyTSkd/fkn8FRkREdDdSc8qw/0IR5GYyLJrA9ofahwGIiIgM2tobqz7/aRjn/lD7MQAREZHBOpldKrY/nPtDHcEAREREBqvpyq8/D/OEtyPbH2o/BiAiIjJIJ7NLkdA094ftD3UQAxARERkktj90NxiAiIjI4Jxo1v7wnl/UGQxARERkcJru+P6X4Z7o7Wgj8WjIEDEAERGRQTmRXYrEP5rW/WH7Q53DAERERAZlDdsf6gIMQEREZDBSLpXiwB9FMGf7Q3eJAYiIiAxG05VffxnuxfaH7goDEBERGQSt9ofr/tBdYgAiIiKDsObGPb8eCvaCyoHtD90dBiAiItJ7KZdK8OvFqzA3k2Eh7/hOXYABiIiI9F7TlV9sf6irMAAREZFeY/tDusAAREREeq2p/Xl4BNsf6joMQEREpLeOZ91sfxaMZ/tDXYcBi
IiI9FbTuj9sf6irMQAREZFeYvtDusQAREREeunm3B8V2x/qcgxARESkd45lleBgWtOVX32kHg4ZIQYgIiLSO02rPj88QgUve7Y/1PUYgIiISK8kZ5bgUFoxLORsf0h3GICIiEivrI1j+0O6xwBERER6o3n7s2A82x/SHQYgIiLSG5z7Q92FAYiIiPTC0YxiHE5vmvvDdX9ItxiAiIhILzSt+vzICBU8e1lLPBoydgxAREQkuebtzwK2P9QNGICIiEhyTas+zxrJ9oe6BwMQERFJ6khGMZIymq78YvtD3YMBiIiIJLW2WfvjwfaHugkDEBERSaap/bGUm7H9oW7FAERERJJpWveH7Q91NwYgIiKSRFJ6MY5klMBSbobnuOozdTO9CEAxMTHw8fGBlZUVQkNDkZyc3Oa+27Ztg0wm03pYWVlp7SMIApYvXw53d3dYW1sjPDwcFy9e1PXbICKiDmi65xfbH5KC5AFox44diIqKwooVK3DixAkEBgYiIiIChYWFbb7Gzs4OeXl54uPSpUtaP3/77bfxwQcfYOPGjTh69ChsbW0RERGB6upqXb8dIiJqh+btzwLe8Z0kIHkAeu+99/DUU09h3rx5GDRoEDZu3AgbGxts2bKlzdfIZDK4ubmJD1dXV/FngiBgzZo1+Mc//oEHH3wQAQEB+PTTT5Gbm4tdu3Z1wzsiIqI7aZr782iICu5Ktj/U/SQNQLW1tUhJSUF4eLi4zczMDOHh4UhKSmrzdZWVlfD29oZKpcKDDz6Is2fPij/LzMxEfn6+1jGVSiVCQ0PbPGZNTQ3UarXWg4iIdCMpvRhHMzn3h6QlaQC6evUqGhoatBocAHB1dUV+fn6rr+nfvz+2bNmC77//Hp9//jk0Gg1Gjx6Ny5cvA4D4uo4cc+XKlVAqleJDpVLd7VsjIqJWCIKA99n+kB6Q/CuwjgoLC8OcOXMQFBSEcePG4bvvvoOzszM2bdrU6WNGR0ejvLxcfOTk5HThiImIqElSRjGSb7Q/XPeHpCRpAHJycoJcLkdBQYHW9oKCAri5ubXrGBYWFhg2bBjS0tIAQHxdR46pUChgZ2en9SAioq4lCIJ4z6/ZISq4Ka3u8Aoi3ZE0AFlaWiI4OBhxcXHiNo1Gg7i4OISFhbXrGA0NDTh9+jTc3d0BAL6+vnBzc9M6plqtxtGjR9t9TCIi6nrN25/n2P6QxMylHkBUVBQiIyMxYsQIhISEYM2aNaiqqsK8efMAAHPmzIGnpydWrlwJAHj99dcxatQo+Pv7o6ysDP/+979x6dIlPPnkkwAarxBbunQp3nzzTfTt2xe+vr745z//CQ8PD8ycOVOqt0lEZNIEQcCavWx/SH9IHoBmzZqFoqIiLF++HPn5+QgKCkJsbKw4iTk7OxtmZjeLqtLSUjz11FPIz8+Hvb09goODcfjwYQwaNEjc5+WXX0ZVVRWefvpplJWVYezYsYiNjW2xYCIREXWPpPRiJGeVwNKc7Q/pB5kgCILUg9A3arUaSqUS5eXlnA9EBqHsWi1kMhmU1hZSD4WoBUEQMGvTESRnlWDuaB+8+sBgqYdERqojv78lb4CIqPPSCivxYUIavk/NhUYQMMDNDqG+Dhjl54AQX0c42FpKPUQiHNZqf7juD+kHBiAiA3Q+X4318Wn46XQemne45/LUOJenxrbDWQCAfq49EOrriFA/B4T6OsK5p0KaAZPJarzyq3Hdn/8L6Q1XO05FIP3AAERkQE5dLsO6+DTs/f3mMg+TB7li0QR/uPeyQnJmCY5mlOBoZjH+KKgUH58dabxfnp+zLUJ9HTHqRiDiRFTStcPpxTiWVcr2h/QOAxCRAUi5VIIP4tKQ+EcRAEAmA+4b6o5FE/wx0P3m99z3B3jg/gAPAEBxZQ2OZZXgSEYJjmaW4Hy+GhlFVcgoqsKXydkAAG9HG4T6OogtkZe9Tfe/OTJabH9In3ESdCs4CZr0gSAISMooxrq4NCRlFAMA5GYyPBjogQUT+sDfpWeHjld2rbaxIcpsbIh+z1VDc8u//Z69rDHKrzEMjfJ1hMrBGjKZrKveEpmYgxev4q+bj0JhboYDL09gACKd4yRoIgMmCAIS/ijC+vg0pFwqBQBYyGX4y3AvPDe+D7wdbTt13F42lpgy2A1TBjeuiK6ursPxrMavzI5kluDMlXJcKbuOb09cxrcnGu+t5660amyI/BwR6usAXydbBiJqF632J5TtD+kfNkCtYANEUtBoBOw9V4D18Wk4faUcAGBpboZHR6rwzLg+8Oyl25tGVtbUI+VSKY5mNN6p+9TlMtQ1aP/nwbmnQgxEo3wd4O/Sg4GIWtW8/fn15QlwYQCibsAGiMiANGgE7D6dh5j9aTifXwEAsLaQ47HQ3nj6Xr9u+8XRQ2GOcf2cMa6fMwDgem0DTmQ3BqIjmSVIzSlDUUUNfjyVhx9P5QEAHG0tEeLrIIai/q49YWbGQGTqbm1/GH5IHzEAEUmkvkGD71NzEZOQhoyiKgCNISRytDeeGOMLxx7SXrJubSnHGH8njPF3AgBU1zUgNadMvMrsRHYpiqtq8fOZfPx8Jh8A0MvGAiN9HG6sReSIge52kDMQmZyDaVdx/FIpFOZmeG4cr/wi/cQARNTNaus1+PbEZXyYkIackusAAKW1BZ4Y44u5o32gtNHP1ZytLOQY5eeIUX6OAPqitl6DU5fLcDSzBEcyipFyqRRl1+qw9/cC8TL9nlbmYiAK9XPEEA87mMslvQcz6VjzO74/FurN9of0FucAtYJzgEgXqusasONYDjYmpiOvvBpA41dIT97jh7+O6o2eVvoZfNqrrkGDM1fKG68yyyjG8axSVNTUa+1jaylHsNgQOWCoZy9YmjMQGZNfLxbh8c3JnPtDkuAcICI9UlVTjy+OZuOjXzNQVFEDAHDpqcAz4/pgdogKNpbG8a+hhdwMw3rbY1hvezw7rg/qGzQ4l1eBo5nFOJJRguTMYqir63HgjyIcuLGekbWFHMHe9mJDFKhSQmEul/idUGcJgoD39zbO/WH7Q/qODVAr2ABRV1BX1+GzpEv45NcMlF6rA9C4zs6z4/vg4WAvWFmY1i/6Bo2A8/lqcQ5RcmaJeF6aKMzNMKx3L3FhxuG97U3uPBmyA38UYc6WG+3P3yfApScDEHUvNkBEEiq7Vosth7Kw7VAm1NWNXwF5O9pg4Xh/zBzmabJf+cjNZBjsocRgDyWeGOsLjUbAxcJKHM0sFkPR1cpaHMloXL0acYCl3AyBKqUYiIK97Y2mMTM2za/8+usob4Yf0ntsgFrBBog642plDT7+NQOfJ11CVW0DAMDfpQcWTfDH/QHunPx7B4IgIL2oSisQFahrtPYxN5NhqNfNQDTC297g504ZC7Y/pA868vubAagVDEDUEfnl1dh0IB1fJmejuk4DABjobofFE/0xdbAb18XpJEEQcKn4WrNAVIIrZde19jGTAUM8leL9zEb6OkBpzUDU3QRBwJ83HMbJ7DLMH+uLf94/SOohkYliALpLDEDUHjkl17AxMR3fHL+M2obG4BOo6oXnJ/pj4gAX
rpCsAzkl18SrzI5mliC75JrWz2UyYKCbHUJv3O0+1NcB9raWEo3WdCT+UYTILcmwsmi85xfbH5IK5wAR6VDm1Sp8uD8NO09eQf2Nu4mG+Dhg8SR/jPV3YvDRIZWDDVQONngo2AsAkFd+Xfy67GhGCTKuVuH3PDV+z1Nj66EsAMAAt57iVWYhvg5wkniBSWOjNfcnlHN/yHCwAWoFGyBqzR8FFYjZn4YffssV76I+1t8Jiyf6I9TPUdrBEQCgUF0t3u3+aEYJLhZWttjH36WH1v3MeKn23Um4UIi5W4/BysIMv748Ec49GTBJOmyAiLrQmSvliNmfJt7uAQAmDXDBwon+GN7bXsKR0a1c7KwwI9ADMwI9ADROTD+WWSKuVn0+vwJphZVIK6zEf45mAwB8nWxvBKLGr808dHzTWWPSfNXnx0d5M/yQQWED1Ao2QAQAJ7NLsT4+DXHnC8VtUwe7YdFEfwzxVEo4Muqs0qpaJGeViF+b/Z6nxq3/BVQ5WIvzh0b5OcLL3ppfa7aB7Q/pGzZARHfhaEYx1sWn4WDaVQCNVxrNCPTAwgn+6OfaU+LR0d2wt7VExGA3RAx2AwCUX6/D8awScWL1mVw1ckquI6fkMv6bchkA4KG0Qqifo/i1mY+jDQMR2P6Q4WMD1Ao2QKZHEAQcTLuKdXFpSM4qAdC45syfhnniufF94OfcQ+IRUneorKnXCkSnLpeLE92buNopECI2RA7o49zDJAPR/guFmMf2h/QMGyCidhIEAfHnC7EuPg2pOWUAGlcffniEF54d1wcqBxtpB0jdqofCHOP7u2B8fxcAwLXaepy4VCZOqk7NKUOBugY//JaLH37LBQA49bBEyI11iEL9HNDPpafRr/3UvP2ZE+bD8EMGiQGITJJGIyD2bD7WxafhXJ4aQON9qP4vtDeevtcP7kpOhCXAxtIcY/s6YWxfJwBAdV0DTmbfDEQnsktxtbIWu0/nY/fpxkny9jYWWoFooJud0QWihD+K8FtOGawszPD0vX5SD4eoUxiAyKTUN2jw46k8xOxPEy+RtrWU469h3nhyrB//Jku3ZWUhR1gfR4T1aVz2oKa+Aacul4sLMx7PKkXptTrsOVuAPWcLAAB2VuZagWiQu51B3xZFEASsuXHH9zlhPlxXiQwWAxCZhLoGDXaeuIIPE9KQVdy4enBPK3PMG+2DeWN8uVowdYrCXI6RPg4Y6eOARWj8nJ2+Ui5eZXY8qxTq6nrsO1eIfecarybsoTDHCB97MRAN9VTCwoACUcKFIvx2uRzWFnK2P2TQGIDIqFXXNeCblMvYmJAu3kfK3sYC88f6Ys5oH9jxRprUhSzkZhje2x7De9vjufF9UN+gwe95ajEQJWeWQF1dj4QLRUi4UAQAsLGUI9jbXrzKLMBLCYW5XOJ30rrmqz7PCfNm+0MGjVeBtYJXgRm+67UN+CI5Gx8dSBfvKO7UQ4Fn7vXD/4X2hq2C2Z+6X4NGwLk8tXiVWXJWCcqu1WntozBvDFFNCzMO690LVhb6EYj2ny/EvG3HYG0hx69/n8AARHqHV4GRyaqsqcdnSZfwya8ZKK6qBQC4K63w7Lg+mDVSpTe/SMg0yc1kGOKpxBBPJeaP9YVGI+CPwgqt+5kVV9UiKaMYSRnFAC7CUm6GIFUvhPo1Lsw4vLc9rC27/3MsCALeZ/tDRoQNUCvYABme8mt12HY4C1sOZaL8euPfqFUO1lgw3h9/Hu6pt18pEDUnCALSiypxJOPmWkSFFTVa+1jIZQjw6iV+ZRbsbY8e3dBoxp8vwBPbjsPaQo6Df58ARwYg0kMd+f3NANQKBiDDUVxZgy2HMvHp4UuoqKkHAPg522LheH88EORhUJNLiW4lCAKyiq+JV5kdzShGbnm11j5NrdKoG/czG+Hj0OVz2wRBwIMxh3DqcjmeGeeH6GkDu/T4RF2FAeguMQDpv0J1NT7+NQOfH8nG9boGAEB/155YNNEf9w11h9zI1l0hAhqDyOXS6zjSFIgyi5FTcl1rHzMZMMjDTryfWYivA3rZ3N1Vjk3tj42lHL++zPaH9BfnAJHRyi27jk2J6fjyWA5q6zUAgKGeSiya6I/JA12NbsE5ouZkMhlUDjZQOdjg4REqAI3/TjTNHzqaWYLMq1U4c0WNM1fU2HwwEzJZ418ORvndDEQdCTC3rvrM8EPGgg1QK9gA6Z/s4mv4MCEN3564jLqGxo/s8N69sHhSX4zv52yS92Iiak2Bulr8uuxIRjHSi6pa7NPXpYd4lVmonwNcelq1eby4cwWYv53tDxkGNkBkNNIKK/Hh/jR8/1suGm7clDLMzxGLJ/ojrI8jgw/RLVztrPBAoAceCPQAABRV1CA58+ZVZhcKKnCxsBIXCyvx+ZFsAICfk61WIGq6FQzbHzJmbIBawQZIeufy1Fi/Pw27T+eh6RM6rp8zFk/0xwgfB2kHR2TASqpqtQLRuXw1bv0t0NvBBqG+DnDqqcCGhHTYWMpx8O8T4cAV00nPGVwDFBMTg3//+9/Iz89HYGAg1q1bh5CQkFb3/fjjj/Hpp5/izJkzAIDg4GC89dZbWvvPnTsX27dv13pdREQEYmNjdfcmqEuculyGdfFp2Pt7gbht8iBXLJ7ojwCvXtINjMhIONhaYuoQN0wd4gagcQmJY1k3AlFmCc5cKUd2yTVkl1wTXxM52ofhh4yO5AFox44diIqKwsaNGxEaGoo1a9YgIiICFy5cgIuLS4v9ExISMHv2bIwePRpWVlZYvXo1pkyZgrNnz8LT01Pcb+rUqdi6dav4XKFgdavPjmeVYF18GhL/aLw9gEwG3DfUHYsm+GOgO1s4Il1R2lggfJArwge5AgAqqutw/FKpuDijXCbD0/fwnl9kfCT/Ciw0NBQjR47E+vXrAQAajQYqlQqLFy/GsmXL7vj6hoYG2NvbY/369ZgzZw6AxgaorKwMu3btatcYampqUFNzc7ExtVoNlUrFr8B0TBAEJKUXY1182o1VbxvXNHkwyAMLxvvD36WHxCMkIiJDYjBfgdXW1iIlJQXR0dHiNjMzM4SHhyMpKaldx7h27Rrq6urg4KA9LyQhIQEuLi6wt7fHxIkT8eabb8LR0bHVY6xcuRKvvfZa598IdYggCEj4owjr49OQcqkUQOPqtg8Fe+HZcX3g7Wgr8QiJiMjYSRqArl69ioaGBri6umptd3V1xfnz59t1jL///e/w8PBAeHi4uG3q1Kn485//DF9fX6Snp+OVV17BtGnTkJSUBLm85S0RoqOjERUVJT5vaoCoa2k0AvaeK8D6+DScvlIOALA0N8PskSo8Pa4PPHtZSzxCIiIyFZLPAbobq1atwldffYWEhARYWd1cx+LRRx8V/3no0KEICAhAnz59kJCQgEmTJrU4jkKh4BwhHWrQCNh9Og8x+9NwPr8CAGBtIcdfR/XGU/f4wcWu7TVIiIiIdEHSAOTk5AS5XI6CggKt7QUFBXBzc7vta9955x2sWrUK+/btQ0BAwG339fPzg5OTE9LS0loNQKQbdQ0a/C81FzEJaci4sRhbD4U5Ikd
7Y/5YP15VQkREkpE0AFlaWiI4OBhxcXGYOXMmgMZJ0HFxcVi0aFGbr3v77bfxr3/9C3v27MGIESPu+OdcvnwZxcXFcHd376qh023U1Dfg25Qr2JCYJt6nSGltgSfG+GLuaB8obbr2Ro1EREQdJflXYFFRUYiMjMSIESMQEhKCNWvWoKqqCvPmzQMAzJkzB56enli5ciUAYPXq1Vi+fDm++OIL+Pj4ID8/HwDQo0cP9OjRA5WVlXjttdfwl7/8BW5ubkhPT8fLL78Mf39/RERESPY+TUF1XQO+Ss7GpgMZyLtxx2pHW0s8eY8fHg/zRg+F5B83IiIiAHoQgGbNmoWioiIsX74c+fn5CAoKQmxsrDgxOjs7G2ZmZuL+GzZsQG1tLR566CGt46xYsQKvvvoq5HI5Tp06he3bt6OsrAweHh6YMmUK3njjDc7z0ZGqmnr85+glfHQgE1crG5cTcLVT4Jl7+2B2SG9YW7aceE5ERCQlydcB0ke8FUb7qKvr8OnhLGw+mInSa3UAAM9e1nhufB88FOwFKwsGHyIi6j4Gsw4QGabSqlpsPZSJrYezUFFdDwDwcbTBggn++NMwT1jIze5wBCIiImkxAFG7FVXU4JODGfg86RKqahsAAH1demDRRH9MH+oOcwYfIiIyEAxAdEf55dXYdCAdXyZno7pOAwAY5G6HxRP9ETHYDWZmMolHSERE1DEMQNSmnJJr2JiYjm+OX0ZtQ2PwCVT1wvMT/TFxgAtkMgYfIiIyTAxA1ELm1SrE7E/DrpNXUK9pnCMf4uOAxZP8MdbficGHiIgMHgMQif4oqMD6+DT8eCoXN3IP7unrhEUT/BHq1/qNZImIiAwRAxDhzJVyrI9PQ+zZfHHbpAEuWDjRH8N720s4MiIiIt1gADJhJ7JLsT4+DfHnC8Vt04a4YeEEfwzxVEo4MiIiIt1iADJBRzKKsT4+DQfTrgIAzGTAjEAPLJzgj36uPSUeHRERke4xAJkIQRDw68WrWB+fhuSsEgCAuZkMfxrmiQUT/OHrZCvxCImIiLoPA5CREwQBcecKsW5/Gn7LKQMAWMrN8PAILzw7rg9UDjbSDpCIiEgCDEBGSqMREHs2H+vi03AuTw0AsLIww/+FeOPpe/3gprSSeIRERETSYQAyMvUNGvx4Kg/r96chrbASAGBrKcfjYT6YP9YXzj0VEo+QiIhIegxARqK2XoNdJ6/gw4Q0ZBVfAwD0tDLHvDG+mDfaB/a2lhKPkIiISH8wABm46roGfJNyGRsT0nGl7DoAwN7GAk/e44fHw7xhZ2Uh8QiJiIj0DwOQgbpe24AvkrPx0YF0FKhrAABOPRR45l4//F9ob9gq+H8tERFRW/hb0sBUVNfhsyOXsPnXTBRX1QIA3JVWeHZcH8waqYKVhVziERIREek/BiADUX6tDlsPZ2LroSyUX68DAKgcrLFgvD/+MtwLluZmEo+QiIjIcDAA6bniyhpsPpiJT5MuobKmHgDg52yLRRP88UCgB8zlDD5EREQdxQCkpwrV1fjoQAb+czQb1+saAAAD3Hpi0UR/TBviDrmZTOIREhERGS4GID1zpew6NiWm46tjOait1wAAhnoqsXiiP8IHusKMwYeIiOiuMQDpiUvFVdiQkI5vT1xGXYMAAAj2tsfiif4Y188ZMhmDDxERUVdhAJJYWmEFPtyfju9/y0WDpjH4hPk5YvEkf4T5OTL4EBER6QADkETO5amxPj4Nu8/kQWjMPRjf3xmLJvhjhI+DtIMjIiIycgxA3ey3nDKsi0/DvnMF4rYpg1yxaKI/Arx6STcwIiIiE8IA1I3e/PF3fHIwEwAgkwHTh7pj4QR/DHS3k3hkREREpoUBqBuF+Dpg6+EsPBjkgQXj/eHv0kPqIREREZkkBqBuFD7QFQkvjofKwUbqoRAREZk0LiPcjczMZAw/REREeoABiIiIiEwOAxARERGZHAYgIiIiMjkMQERERGRyGICIiIjI5DAAERERkclhACIiIiKToxcBKCYmBj4+PrCyskJoaCiSk5Nvu/8333yDAQMGwMrKCkOHDsXu3bu1fi4IApYvXw53d3dYW1sjPDwcFy9e1OVbICIiIgMieQDasWMHoqKisGLFCpw4cQKBgYGIiIhAYWFhq/sfPnwYs2fPxvz583Hy5EnMnDkTM2fOxJkzZ8R93n77bXzwwQfYuHEjjh49CltbW0RERKC6urq73hYRERHpMZkgCIKUAwgNDcXIkSOxfv16AIBGo4FKpcLixYuxbNmyFvvPmjULVVVV+PHHH8Vto0aNQlBQEDZu3AhBEODh4YEXXngBL774IgCgvLwcrq6u2LZtGx599NE7jkmtVkOpVKK8vBx2drxRKRERkSHoyO9vSRug2tpapKSkIDw8XNxmZmaG8PBwJCUltfqapKQkrf0BICIiQtw/MzMT+fn5WvsolUqEhoa2ecyamhqo1WqtBxERERkvSQPQ1atX0dDQAFdXV63trq6uyM/Pb/U1+fn5t92/6X87csyVK1dCqVSKD5VK1an3Q0RERIZB8jlA+iA6Ohrl5eXiIycnR+ohERERkQ6ZS/mHOzk5QS6Xo6CgQGt7QUEB3NzcWn2Nm5vbbfdv+t+CggK4u7tr7RMUFNTqMRUKBRQKhfi8aVoUvwojIiIyHE2/t9szvVnSAGRpaYng4GDExcVh5syZABonQcfFxWHRokWtviYsLAxxcXFYunSpuG3v3r0ICwsDAPj6+sLNzQ1xcXFi4FGr1Th69Ciee+65do2roqICAPhVGBERkQGqqKiAUqm87T6SBiAAiIqKQmRkJEaMGIGQkBCsWbMGVVVVmDdvHgBgzpw58PT0xMqVKwEAS5Yswbhx4/Duu+9i+vTp+Oqrr3D8+HF89NFHAACZTIalS5fizTffRN++feHr64t//vOf8PDwEEPWnXh4eCAnJwc9e/aETCbr0verVquhUqmQk5PDK8zugOeq/Xiu2o/nqv14rtqP56r9dHmuBEFARUUFPDw87riv5AFo1qxZKCoqwvLly5Gfn4+goCDExsaKk5izs7NhZnZzqtLo0aPxxRdf4B//+AdeeeUV9O3bF7t27cKQIUPEfV5++WVUVVXh6aefRllZGcaOHYvY2FhYWVm1a0xmZmbw8vLq2jd6Czs7O/5L0k48V+3Hc9V+PFftx3PVfjxX7aerc3Wn5qeJ5OsAmRquMdR+PFftx3PVfjxX7cdz1X48V+2nL+eKV4ERERGRyWEA6mYKhQIrVqzQuuqMWsdz1X48V+3Hc9V+PFftx3PVfvpyrvgVGBEREZkcNkBERERkchiAiIiIyOQwABEREZHJYQAiIiIik8MA1IUOHDiAGTNmwMPDAzKZDLt27brjaxISEjB8+HAoFAr4+/tj27ZtOh+nPujouUpISIBMJmvxyM/P754BS2jlypUYOXIkevbsCRcXF8ycORMXLly44+u++eYbDBgwAFZWVhg6dCh2797dDaOVVmfO1bZt21p8rtq7aKoh27BhAwICAsTF6MLCwvDzzz/f9jWm+JkCOn6uTPUz1ZpVq1aJd2
i4HSk+WwxAXaiqqgqBgYGIiYlp1/6ZmZmYPn06JkyYgNTUVCxduhRPPvkk9uzZo+ORSq+j56rJhQsXkJeXJz5cXFx0NEL9kZiYiIULF+LIkSPYu3cv6urqMGXKFFRVVbX5msOHD2P27NmYP38+Tp48iZkzZ2LmzJk4c+ZMN468+3XmXAGNK9I2/1xdunSpm0YsHS8vL6xatQopKSk4fvw4Jk6ciAcffBBnz55tdX9T/UwBHT9XgGl+pm517NgxbNq0CQEBAbfdT7LPlkA6AUDYuXPnbfd5+eWXhcGDB2ttmzVrlhAREaHDkemf9pyr/fv3CwCE0tLSbhmTPissLBQACImJiW3u88gjjwjTp0/X2hYaGio888wzuh6eXmnPudq6daugVCq7b1B6zN7eXvjkk09a/Rk/U9pud674mRKEiooKoW/fvsLevXuFcePGCUuWLGlzX6k+W2yAJJSUlITw8HCtbREREUhKSpJoRPovKCgI7u7umDx5Mg4dOiT1cCRRXl4OAHBwcGhzH362GrXnXAFAZWUlvL29oVKp7vg3e2PU0NCAr776ClVVVQgLC2t1H36mGrXnXAH8TC1cuBDTp09v8ZlpjVSfLclvhmrK8vPzxZu+NnF1dYVarcb169dhbW0t0cj0j7u7OzZu3IgRI0agpqYGn3zyCcaPH4+jR49i+PDhUg+v22g0GixduhRjxozRugHwrdr6bJnCnKkm7T1X/fv3x5YtWxAQEIDy8nK88847GD16NM6ePavzmyJL7fTp0wgLC0N1dTV69OiBnTt3YtCgQa3ua+qfqY6cK1P+TAHAV199hRMnTuDYsWPt2l+qzxYDEBmE/v37o3///uLz0aNHIz09He+//z4+++wzCUfWvRYuXIgzZ87g4MGDUg9F77X3XIWFhWn9TX706NEYOHAgNm3ahDfeeEPXw5RU//79kZqaivLycvz3v/9FZGQkEhMT2/zFbso6cq5M+TOVk5ODJUuWYO/evXo/8ZsBSEJubm4oKCjQ2lZQUAA7Ozu2P+0QEhJiUkFg0aJF+PHHH3HgwIE7/i2yrc+Wm5ubLoeoNzpyrm5lYWGBYcOGIS0tTUej0x+Wlpbw9/cHAAQHB+PYsWNYu3YtNm3a1GJfU/9MdeRc3cqUPlMpKSkoLCzUauYbGhpw4MABrF+/HjU1NZDL5VqvkeqzxTlAEgoLC0NcXJzWtr179972e2W6KTU1Fe7u7lIPQ+cEQcCiRYuwc+dOxMfHw9fX946vMdXPVmfO1a0aGhpw+vRpk/hs3Uqj0aCmpqbVn5nqZ6ottztXtzKlz9SkSZNw+vRppKamio8RI0bgscceQ2pqaovwA0j42dLpFGsTU1FRIZw8eVI4efKkAEB47733hJMnTwqXLl0SBEEQli1bJjz++OPi/hkZGYKNjY3w0ksvCefOnRNiYmIEuVwuxMbGSvUWuk1Hz9X7778v7Nq1S7h48aJw+vRpYcmSJYKZmZmwb98+qd5Ct3nuuecEpVIpJCQkCHl5eeLj2rVr4j6PP/64sGzZMvH5oUOHBHNzc+Gdd94Rzp07J6xYsUKwsLAQTp8+LcVb6DadOVevvfaasGfPHiE9PV1ISUkRHn30UcHKyko4e/asFG+h2yxbtkxITEwUMjMzhVOnTgnLli0TZDKZ8MsvvwiCwM9Ucx09V6b6mWrLrVeB6ctniwGoCzVdqn3rIzIyUhAEQYiMjBTGjRvX4jVBQUGCpaWl4OfnJ2zdurXbxy2Fjp6r1atXC3369BGsrKwEBwcHYfz48UJ8fLw0g+9mrZ0nAFqflXHjxonnrsnXX38t9OvXT7C0tBQGDx4s/PTTT907cAl05lwtXbpU6N27t2BpaSm4uroK9913n3DixInuH3w3e+KJJwRvb2/B0tJScHZ2FiZNmiT+QhcEfqaa6+i5MtXPVFtuDUD68tmSCYIg6LZjIiIiItIvnANEREREJocBiIiIiEwOAxARERGZHAYgIiIiMjkMQERERGRyGICIiIjI5DAAERERkclhACIiIiKTwwBERJLIysqCTCZDamqq1EMRnT9/HqNGjYKVlRWCgoKkHg4R6RADEJGJmjt3LmQyGVatWqW1fdeuXZDJZBKNSlorVqyAra0tLly40OLmjE2aztutj6640/f48eOxdOnSuz4OEd0ZAxCRCbOyssLq1atRWloq9VC6TG1tbadfm56ejrFjx8Lb2xuOjo5t7jd16lTk5eVpPTpz53lduZtzQGQqGICITFh4eDjc3NywcuXKNvd59dVXW3wdtGbNGvj4+IjP586di5kzZ+Ktt96Cq6srevXqhddffx319fV46aWX4ODgAC8vL2zdurXF8c+fP4/Ro0fDysoKQ4YMQWJiotbPz5w5g2nTpqFHjx5wdXXF448/jqtXr4o/Hz9+PBYtWoSlS5fCyckJERERrb4PjUaD119/HV5eXlAoFAgKCkJsbKz4c5lMhpSUFLz++uuQyWR49dVX2zwnCoUCbm5uWg+5XI733nsPQ4cOha2tLVQqFRYsWIDKykqt1x46dAjjx4+HjY0N7O3tERERgdLSUsydOxeJiYlYu3at2CplZWUBABITExESEgKFQgF3d3csW7YM9fX1tz0HgiDg1VdfRe/evaFQKODh4YHnn3++zfdEZGoYgIhMmFwux1tvvYV169bh8uXLd3Ws+Ph45Obm4sCBA3jvvfewYsUK3H///bC3t8fRo0fx7LPP4plnnmnx57z00kt44YUXcPLkSYSFhWHGjBkoLi4GAJSVlWHixIkYNmwYjh8/jtjYWBQUFOCRRx7ROsb27dthaWmJQ4cOYePGja2Ob+3atXj33Xfxzjvv4NSpU4iIiMADDzyAixcvAgDy8vIwePBgvPDCC8jLy8OLL77Y4XNgZmaGDz74AGfPnsX27dsRHx+Pl19+Wfx5amoqJk2ahEGDBiEpKQkHDx7EjBkz0NDQgLVr1yIsLAxPPfWU2CqpVCpcuXIF9913H0aOHInffvsNGzZswObNm/Hmm2/e9hx8++23eP/997Fp0yZcvHgRu3btwtChQzv8noiMls7vN09EeikyMlJ48MEHBUEQhFGjRglPPPGEIAiCsHPnTqH5fxpWrFghBAYGar32/fffF7y9vbWO5e3tLTQ0NIjb+vfvL9xzzz3i8/r6esHW1lb48ssvBUEQhMzMTAGAsGrVKnGfuro6wcvLS1i9erUgCILwxhtvCFOmTNH6s3NycgQAwoULFwRBEIRx48YJw4YNu+P79fDwEP71r39pbRs5cqSwYMEC8XlgYKCwYsWK2x4nMjJSkMvlgq2trfh46KGHWt33m2++ERwdHcXns2fPFsaMGdPmsceNGycsWbJEa9srr7wi9O/fX9BoNOK2mJgYoUePHuL5bu0cvPvuu0K/fv2E2tra274fIlPFBoiIsHr1amzfvh3nzp3r9DEGDx4MM7Ob/0lxdXXVahzkcjkcHR1RWFio9bqwsDDxn83NzTFixAhxHL/99hv279+PHj16iI8BAwYAaJyv0yQ4OPi2Y1Or1cjNzcWYMWO0to8ZM
6ZT73nChAlITU0VHx988AEAYN++fZg0aRI8PT3Rs2dPPP744yguLsa1a9cA3GyAOuLcuXMICwvTmpg+ZswYVFZWarVpt56Dhx9+GNevX4efnx+eeuop7Ny5U+trMyJTxwBERLj33nsRERGB6OjoFj8zMzODIAha2+rq6lrsZ2FhofVcJpO1uk2j0bR7XJWVlZgxY4ZW2EhNTcXFixdx7733ivvZ2tq2+5hdwdbWFv7+/uLD3d0dWVlZuP/++xEQEIBvv/0WKSkpiImJAXBzUrK1tbVOx9ScSqXChQsX8OGHH8La2hoLFizAvffe2+r/d0SmiAGIiAAAq1atwg8//ICkpCSt7c7OzsjPz9cKQV25ds+RI0fEf66vr0dKSgoGDhwIABg+fDjOnj0LHx8frcDh7+/fodBjZ2cHDw8PHDp0SGv7oUOHMGjQoC55HykpKdBoNHj33XcxatQo9OvXD7m5uVr7BAQEtHl5PQBYWlqioaFBa9vAgQORlJSkdf4PHTqEnj17wsvL67Zjsra2xowZM/DBBx8gISEBSUlJOH36dCfeHZHxYQAiIgDA0KFD8dhjj4lf5zQZP348ioqK8PbbbyM9PR0xMTH4+eefu+zPjYmJwc6dO3H+/HksXLgQpaWleOKJJwAACxcuRElJCWbPno1jx44hPT0de/bswbx581oEhTt56aWXsHr1auzYsQMXLlzAsmXLkJqaiiVLlnTJ+/D390ddXR3WrVuHjIwMfPbZZy0mZEdHR+PYsWNYsGABTp06hfPnz2PDhg3iVW0+Pj44evQosrKycPXqVWg0GixYsAA5OTlYvHgxzp8/j++//x4rVqxAVFSU1leOt9q2bRs2b96MM2fOICMjA59//jmsra3h7e3dJe+XyNAxABGR6PXXX2/xFdXAgQPx4YcfIiYmBoGBgUhOTu7UFVJtWbVqFVatWoXAwEAcPHgQ//vf/+Dk5AQAYmvT0NCAKVOmYOjQoVi6dCl69ep121/+rXn++ecRFRWFF154AUOHDkVsbCz+97//oW/fvl3yPgIDA/Hee+9h9erVGDJkCP7zn/+0WF6gX79++OWXX/Dbb78hJCQEYWFh+P7772Fubg4AePHFFyGXyzFo0CA4OzsjOzsbnp6e2L17N5KTkxEYGIhnn30W8+fPxz/+8Y/bjqdXr174+OOPMWbMGAQEBGDfvn344Ycfbru+EZEpkQm3frlPREREZOTYABEREZHJYQAiIiIik8MARERERCaHAYiIiIhMDgMQERERmRwGICIiIjI5DEBERERkchiAiIiIyOQwABEREZHJYQAiIiIik8MARERERCbn/wMw5PbYwpNNqgAAAABJRU5ErkJggg==",
"text/plain": [
"