From 9ba0ec068b767774f8666e4a7bc054c53235f8b2 Mon Sep 17 00:00:00 2001
From: sb <sbenthall@gmail.com>
Date: Tue, 2 Jul 2024 10:58:07 -0400
Subject: [PATCH 1/9] shock configuration now refers to calibration variables
 by name; tests don't pass

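Shocks in a block can now be declared as a (constructor, arguments) tuple
whose string values name calibration entries, rather than as pre-built
Distribution instances. A minimal sketch of the intended configuration,
using the names from HARK/models/consumer.py below (an illustration only):

    from HARK.distribution import Bernoulli, MeanOneLogNormal

    calibration = {"LivPrb": 0.98, "TranShkStd": 0.1}

    shocks = {
        "live": (Bernoulli, {"p": "LivPrb"}),
        "theta": (MeanOneLogNormal, {"sigma": "TranShkStd"}),
    }

Resolving these tuples into Distribution objects is added in a later patch
in this series, which is why the tests do not pass yet.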
---
 HARK/models/consumer.py | 25 ++++++++++++-------------
 1 file changed, 12 insertions(+), 13 deletions(-)

diff --git a/HARK/models/consumer.py b/HARK/models/consumer.py
index 148c81457..61d2afb5a 100644
--- a/HARK/models/consumer.py
+++ b/HARK/models/consumer.py
@@ -7,28 +7,26 @@
 Microeconomic Dynamic Stochastic Optimization Problems"
 """
 
-# TODO: Include these in calibration, then construct shocks
-LivPrb = 0.98
-TranShkStd = 0.1
-RiskyStd = 0.1
-
 calibration = {
     "DiscFac": 0.96,
     "CRRA": 2.0,
     "R": 1.03,  # note: this can be overriden by the portfolio dynamics
     "Rfree": 1.03,
     "EqP": 0.02,
-    "LivPrb": LivPrb,
+    "LivPrb": 0.98,
     "PermGroFac": 1.01,
     "BoroCnstArt": None,
+    "TranShkStd" : 0.1,
+    "LivPrb" : 0.98,
+    "RiskyStd" : 0.1
 }
 
 consumption_block = DBlock(
     **{
         "name": "consumption",
         "shocks": {
-            "live": Bernoulli(p=LivPrb),  # Move to tick or mortality block?
-            "theta": MeanOneLogNormal(sigma=TranShkStd),
+            "live": (Bernoulli, {'p' : 'LivPrb'}),  # Move to tick or mortality block?
+            "theta": (MeanOneLogNormal, {'sigma' : 'TranShkStd'}),
         },
         "dynamics": {
             "b": lambda k, R: k * R,
@@ -46,8 +44,8 @@
     **{
         "name": "consumption normalized",
         "shocks": {
-            "live": Bernoulli(p=LivPrb),  # Move to tick or mortality block?
-            "theta": MeanOneLogNormal(sigma=TranShkStd),
+            "live": (Bernoulli, {"p" : "LivPrb"}),  # Move to tick or mortality block?
+            "theta": (MeanOneLogNormal, {"sigma" : "TranShkStd"}),
         },
         "dynamics": {
             "b": lambda k, R, PermGroFac: k * R / PermGroFac,
@@ -63,9 +61,10 @@
     **{
         "name": "portfolio",
         "shocks": {
-            "risky_return": Lognormal.from_mean_std(
-                calibration["Rfree"] + calibration["EqP"], RiskyStd
-            )
+            "risky_return": (Lognormal,{
+                "mean" : "Rfree + EqP",
+                "std" : "RiskyStd"
+            })
         },
         "dynamics": {
             "stigma": Control(["a"]),

From 7b1eddf0f715b5ca82b75980a08b9382e56c81e8 Mon Sep 17 00:00:00 2001
From: sb <sbenthall@gmail.com>
Date: Tue, 2 Jul 2024 12:04:54 -0400
Subject: [PATCH 2/9] optional mean/std initialization of Lognormal
 distribution via the constructor

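Internally the distribution is still parameterized by (mu, sigma) of the
underlying normal; when mean and std are both given, they are converted as

    mu = log(mean / sqrt(1 + std**2 / mean**2))
    sigma = sqrt(log(1 + std**2 / mean**2))

so that, for example (a sketch intended to mirror the existing
Lognormal.from_mean_std class method, up to the random seed):

    from HARK.distribution import Lognormal

    risky = Lognormal(mean=1.05, std=0.1)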
---
 HARK/distribution.py | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/HARK/distribution.py b/HARK/distribution.py
index d26ecec88..455c0c705 100644
--- a/HARK/distribution.py
+++ b/HARK/distribution.py
@@ -311,6 +311,8 @@ def __new__(
         mu: Union[float, np.ndarray] = 0.0,
         sigma: Union[float, np.ndarray] = 1.0,
         seed: Optional[int] = 0,
+        mean = None,
+        std = None
     ):
         """
         Create a new Lognormal distribution. If sigma is zero, return a
@@ -324,6 +326,10 @@ def __new__(
             Standard deviation of underlying normal distribution, by default 1.0
         seed : Optional[int], optional
             Seed for random number generator, by default None
+        mean: optional
+            For alternative mean/std parameterization, the mean of the lognormal distribution
+        std: optional
+            For alternative mean/std parameterization, the standard deviation of the lognormal distribution
 
         Returns
         -------
@@ -342,7 +348,15 @@ def __init__(
         mu: Union[float, np.ndarray] = 0.0,
         sigma: Union[float, np.ndarray] = 1.0,
         seed: Optional[int] = 0,
+        mean = None,
+        std = None
     ):
+        if mean is not None and std is not None:
+            mean_squared = mean**2
+            variance = std**2
+            mu = np.log(mean / (np.sqrt(1.0 + variance / mean_squared)))
+            sigma = np.sqrt(np.log(1.0 + variance / mean_squared))
+
         self.mu = np.asarray(mu)
         self.sigma = np.asarray(sigma)
 

From 214edad2d608fcdd0c409c3a6e70bd407d80f53a Mon Sep 17 00:00:00 2001
From: sb <sbenthall@gmail.com>
Date: Tue, 2 Jul 2024 12:06:35 -0400
Subject: [PATCH 3/9] construct_shocks function can construct shocks with
 mathematical expressions for arguments

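String-valued arguments are parsed with math_text_to_lambda and evaluated
against the provided calibration scope; entries that are already
Distribution objects pass through unchanged. A sketch of the intended
behavior, using the portfolio block's names:

    from HARK.distribution import Lognormal
    from HARK.model import construct_shocks

    shock_data = {
        "risky_return": (Lognormal, {"mean": "Rfree + EqP", "std": "RiskyStd"}),
    }
    scope = {"Rfree": 1.03, "EqP": 0.02, "RiskyStd": 0.1}

    shocks = construct_shocks(shock_data, scope)
    # shocks["risky_return"] is a Lognormal with mean 1.05 and std 0.1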
---
 HARK/model.py | 59 ++++++++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 58 insertions(+), 1 deletion(-)

diff --git a/HARK/model.py b/HARK/model.py
index 19c8df296..38e166e28 100644
--- a/HARK/model.py
+++ b/HARK/model.py
@@ -77,6 +77,56 @@ def discretized_shock_dstn(shocks, disc_params):
 
     return all_shock_dstn
 
+def construct_shocks(shock_data, scope):
+    """
+    Returns a dictionary from shock labels to Distributions.
+
+    When the corresponding value in shock_data contains
+    a distribution constructor and input information,
+    any symbolic expressions used in the inputs are
+    evaluated in the provided scope.
+
+    Parameters
+    ----------
+
+    shock_data: Mapping(str, Distribution or tuple)
+        A mapping from variable names to Distribution objects,
+        representing exogenous shocks.
+
+        Optionally, the mapping can be to tuples of Distribution
+        constructors and a dictionary of input arguments.
+        In this case, the dictionary can map argument names to
+        numbers, or to strings. The strings are parsed as
+        mathematical expressions and evaluated in the scope
+        of a calibration dictionary.
+
+    scope: dict(str, values)
+        A mapping from variable names to numerical values;
+        the scope in which the string expressions are evaluated.
+    """
+    sd = deepcopy(shock_data)
+
+    for v in sd:
+        if isinstance(sd[v], tuple):
+            dist_class = sd[v][0]
+
+            dist_args = sd[v][1] # should be a dictionary
+
+            for a in dist_args:
+                if isinstance(dist_args[a], str):
+                    arg_lambda = math_text_to_lambda(dist_args[a])
+                    arg_value = arg_lambda(*[scope[var] for var in signature(arg_lambda).parameters])
+
+                    dist_args[a] = arg_value
+
+            print(v)
+            print(dist_class)
+            print(dist_args)
+            dist = dist_class(**dist_args)
+
+            sd[v] = dist
+
+    return sd
 
 def simulate_dynamics(
     dynamics: Mapping[str, Union[Callable, Control]],
@@ -149,10 +199,17 @@ class DBlock(Block):
 
     Parameters
     ----------
-    shocks: Mapping(str, Distribution)
+    shocks: Mapping(str, Distribution or tuple)
         A mapping from variable names to Distribution objects,
         representing exogenous shocks.
 
+        Optionally, the mapping can be to tuples of Distribution
+        constructors and a dictionary of input arguments.
+        In this case, the dictionary can map argument names to
+        numbers, or to strings. The strings are parsed as
+        mathematical expressions and evaluated in the scope
+        of a calibration dictionary.
+
     dynamics: Mapping(str, str or callable)
         A dictionary mapping variable names to mathematical expressions.
         These expressions can be simple functions, in which case the

From e92e2497d0ebcfbe3b2c30fe5f51f5fac2d8f2d3 Mon Sep 17 00:00:00 2001
From: sb <sbenthall@gmail.com>
Date: Tue, 2 Jul 2024 12:07:39 -0400
Subject: [PATCH 4/9] monte carlo simulator now constructs shocks in
 calibration scope at initialization

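Only the initialization step changes: the shock configuration read from the
block is resolved against the calibration before any draws are made. In
isolation (a sketch using the consumption block from HARK.models.consumer):

    raw_shocks = consumption_block.get_shocks()
    # e.g. {"theta": (MeanOneLogNormal, {"sigma": "TranShkStd"}), ...}

    shocks = construct_shocks(raw_shocks, calibration)
    # e.g. {"theta": MeanOneLogNormal(sigma=0.1), ...}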
---
 HARK/simulation/monte_carlo.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/HARK/simulation/monte_carlo.py b/HARK/simulation/monte_carlo.py
index fcae5f31a..2aa887710 100644
--- a/HARK/simulation/monte_carlo.py
+++ b/HARK/simulation/monte_carlo.py
@@ -14,7 +14,7 @@
 )
 from HARK.model import Aggregate
 from HARK.model import DBlock
-from HARK.model import simulate_dynamics
+from HARK.model import construct_shocks, simulate_dynamics
 
 
 def draw_shocks(shocks: Mapping[str, Distribution], conditions: Sequence[int]):
@@ -139,7 +139,11 @@ def __init__(
 
         self.calibration = calibration
         self.block = block
-        self.shocks = block.get_shocks()
+
+        # shocks are exogenous (except for age) but can depend on the calibration
+        raw_shocks = block.get_shocks()
+        self.shocks = construct_shocks(raw_shocks, calibration)
+
         self.dynamics = block.get_dynamics()
         self.dr = dr
         self.initial = initial

From 6d62587fd9232de9726446a677579dfa98565efb Mon Sep 17 00:00:00 2001
From: sb <sbenthall@gmail.com>
Date: Tue, 2 Jul 2024 12:08:31 -0400
Subject: [PATCH 5/9] ruff fixes

---
 HARK/distribution.py    |  8 ++++----
 HARK/model.py           |  8 ++++++--
 HARK/models/consumer.py | 19 ++++++++-----------
 3 files changed, 18 insertions(+), 17 deletions(-)

diff --git a/HARK/distribution.py b/HARK/distribution.py
index 455c0c705..c53ea166d 100644
--- a/HARK/distribution.py
+++ b/HARK/distribution.py
@@ -311,8 +311,8 @@ def __new__(
         mu: Union[float, np.ndarray] = 0.0,
         sigma: Union[float, np.ndarray] = 1.0,
         seed: Optional[int] = 0,
-        mean = None,
-        std = None
+        mean=None,
+        std=None,
     ):
         """
         Create a new Lognormal distribution. If sigma is zero, return a
@@ -348,8 +348,8 @@ def __init__(
         mu: Union[float, np.ndarray] = 0.0,
         sigma: Union[float, np.ndarray] = 1.0,
         seed: Optional[int] = 0,
-        mean = None,
-        std = None
+        mean=None,
+        std=None,
     ):
         if mean is not None and std is not None:
             mean_squared = mean**2
diff --git a/HARK/model.py b/HARK/model.py
index 38e166e28..8105889d7 100644
--- a/HARK/model.py
+++ b/HARK/model.py
@@ -77,6 +77,7 @@ def discretized_shock_dstn(shocks, disc_params):
 
     return all_shock_dstn
 
+
 def construct_shocks(shock_data, scope):
     """
     Returns a dictionary from shock labels to Distributions.
@@ -110,12 +111,14 @@ def construct_shocks(shock_data, scope):
         if isinstance(sd[v], tuple):
             dist_class = sd[v][0]
 
-            dist_args = sd[v][1] # should be a dictionary
+            dist_args = sd[v][1]  # should be a dictionary
 
             for a in dist_args:
                 if isinstance(dist_args[a], str):
                     arg_lambda = math_text_to_lambda(dist_args[a])
-                    arg_value = arg_lambda(*[scope[var] for var in signature(arg_lambda).parameters])
+                    arg_value = arg_lambda(
+                        *[scope[var] for var in signature(arg_lambda).parameters]
+                    )
 
                     dist_args[a] = arg_value
 
@@ -128,6 +131,7 @@ def construct_shocks(shock_data, scope):
 
     return sd
 
+
 def simulate_dynamics(
     dynamics: Mapping[str, Union[Callable, Control]],
     pre: Mapping[str, Any],
diff --git a/HARK/models/consumer.py b/HARK/models/consumer.py
index 61d2afb5a..d629173cc 100644
--- a/HARK/models/consumer.py
+++ b/HARK/models/consumer.py
@@ -16,17 +16,17 @@
     "LivPrb": 0.98,
     "PermGroFac": 1.01,
     "BoroCnstArt": None,
-    "TranShkStd" : 0.1,
-    "LivPrb" : 0.98,
-    "RiskyStd" : 0.1
+    "TranShkStd": 0.1,
+    "LivPrb": 0.98,
+    "RiskyStd": 0.1,
 }
 
 consumption_block = DBlock(
     **{
         "name": "consumption",
         "shocks": {
-            "live": (Bernoulli, {'p' : 'LivPrb'}),  # Move to tick or mortality block?
-            "theta": (MeanOneLogNormal, {'sigma' : 'TranShkStd'}),
+            "live": (Bernoulli, {"p": "LivPrb"}),  # Move to tick or mortality block?
+            "theta": (MeanOneLogNormal, {"sigma": "TranShkStd"}),
         },
         "dynamics": {
             "b": lambda k, R: k * R,
@@ -44,8 +44,8 @@
     **{
         "name": "consumption normalized",
         "shocks": {
-            "live": (Bernoulli, {"p" : "LivPrb"}),  # Move to tick or mortality block?
-            "theta": (MeanOneLogNormal, {"sigma" : "TranShkStd"}),
+            "live": (Bernoulli, {"p": "LivPrb"}),  # Move to tick or mortality block?
+            "theta": (MeanOneLogNormal, {"sigma": "TranShkStd"}),
         },
         "dynamics": {
             "b": lambda k, R, PermGroFac: k * R / PermGroFac,
@@ -61,10 +61,7 @@
     **{
         "name": "portfolio",
         "shocks": {
-            "risky_return": (Lognormal,{
-                "mean" : "Rfree + EqP",
-                "std" : "RiskyStd"
-            })
+            "risky_return": (Lognormal, {"mean": "Rfree + EqP", "std": "RiskyStd"})
         },
         "dynamics": {
             "stigma": Control(["a"]),

From 084d0d5dbd6dea5700d16d0fda9fe7e74de9c121 Mon Sep 17 00:00:00 2001
From: sb <sbenthall@gmail.com>
Date: Tue, 2 Jul 2024 12:16:04 -0400
Subject: [PATCH 6/9] changelog update for new shock constructor #1464

---
 Documentation/CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/Documentation/CHANGELOG.md b/Documentation/CHANGELOG.md
index 978d117bf..a9d7f959f 100644
--- a/Documentation/CHANGELOG.md
+++ b/Documentation/CHANGELOG.md
@@ -17,6 +17,7 @@ Release Date: TBD
 - Adds a discretize method to DBlocks and RBlocks (#1460)[https://github.com/econ-ark/HARK/pull/1460]
 - Allows structural equations in model files to be provided in string form [#1427](https://github.com/econ-ark/HARK/pull/1427)
 - Introduces `HARK.parser' module for parsing configuration files into models [#1427](https://github.com/econ-ark/HARK/pull/1427)
+- Allows construction of shocks with arguments based on mathematical expressions [#1464](https://github.com/econ-ark/HARK/pull/1464)
 
 #### Minor Changes
 

From a1f01f6cb922ec9dc759d5b14ef23489d1324235 Mon Sep 17 00:00:00 2001
From: sb <sbenthall@gmail.com>
Date: Tue, 2 Jul 2024 14:28:24 -0400
Subject: [PATCH 7/9] adding construct_shocks method to blocks, fixing tests

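DBlock.construct_shocks(calibration) resolves any (constructor, arguments)
shock tuples in place; RBlock.construct_shocks(calibration) recurses into
its nested blocks. A sketch of the intended usage, as exercised in the
updated tests:

    from HARK.models import consumer as cons

    block = cons.consumption_block_normalized
    block.construct_shocks(cons.calibration)  # mutates the block in place

    # block.get_shocks()["theta"] is now a MeanOneLogNormal instance rather
    # than a (constructor, arguments) tuple, so discretize() and the Monte
    # Carlo simulator can work with it directly.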
---
 HARK/model.py            | 14 ++++++++++++++
 HARK/tests/test_model.py |  2 ++
 2 files changed, 16 insertions(+)

diff --git a/HARK/model.py b/HARK/model.py
index 8105889d7..1fdf35bb4 100644
--- a/HARK/model.py
+++ b/HARK/model.py
@@ -228,6 +228,13 @@ class DBlock(Block):
     dynamics: dict = field(default_factory=dict)
     reward: dict = field(default_factory=dict)
 
+    def construct_shocks(self, calibration):
+        """
+        Constructs all shocks given calibration.
+        This method mutates the DBlock.
+        """
+        self.shocks = construct_shocks(self.shocks, calibration)
+
     def discretize(self, disc_params):
         """
         Returns a new DBlock which is a copy of this one, but with shock discretized.
@@ -360,6 +367,13 @@ class RBlock(Block):
     description: str = ""
     blocks: List[Block] = field(default_factory=list)
 
+    def construct_shocks(self, calibration):
+        """
+        Construct all shocks given a calibration dictionary.
+        """
+        for b in self.blocks:
+            b.construct_shocks(calibration)
+
     def discretize(self, disc_params):
         """
         Recursively discretizes all the blocks.
diff --git a/HARK/tests/test_model.py b/HARK/tests/test_model.py
index ad14fe27e..0632a6a7f 100644
--- a/HARK/tests/test_model.py
+++ b/HARK/tests/test_model.py
@@ -34,6 +34,7 @@ class test_DBlock(unittest.TestCase):
     def setUp(self):
         self.test_block_A = model.DBlock(**test_block_A_data)
         self.cblock = cons.consumption_block_normalized
+        self.cblock.construct_shocks(cons.calibration)
 
         # prior states relative to the decision, so with realized shocks.
         self.dpre = {"k": 2, "R": 1.05, "PermGroFac": 1.1, "theta": 1, "CRRA": 2}
@@ -98,6 +99,7 @@ def test_init(self):
         self.assertEqual(len(r_block_tree.get_shocks()), 3)
 
     def test_discretize(self):
+        self.cpp.construct_shocks(cons.calibration)
         cppd = self.cpp.discretize({"theta": {"N": 5}, "risky_return": {"N": 6}})
 
         self.assertEqual(len(cppd.get_shocks()["theta"].pmv), 5)

From 7e93ece349c722515d01883197708f98cce5a6bc Mon Sep 17 00:00:00 2001
From: sb <sbenthall@gmail.com>
Date: Tue, 2 Jul 2024 14:51:39 -0400
Subject: [PATCH 8/9] remove repeated key literal

---
 HARK/models/consumer.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/HARK/models/consumer.py b/HARK/models/consumer.py
index d629173cc..5a990b4e8 100644
--- a/HARK/models/consumer.py
+++ b/HARK/models/consumer.py
@@ -17,7 +17,6 @@
     "PermGroFac": 1.01,
     "BoroCnstArt": None,
     "TranShkStd": 0.1,
-    "LivPrb": 0.98,
     "RiskyStd": 0.1,
 }
 

From a5814851b68973e232a084b98a11d4dd66429821 Mon Sep 17 00:00:00 2001
From: sb <sbenthall@gmail.com>
Date: Wed, 3 Jul 2024 11:59:13 -0400
Subject: [PATCH 9/9] removing stray print statements

---
 HARK/model.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/HARK/model.py b/HARK/model.py
index 1fdf35bb4..7f92bb2e6 100644
--- a/HARK/model.py
+++ b/HARK/model.py
@@ -122,9 +122,6 @@ def construct_shocks(shock_data, scope):
 
                     dist_args[a] = arg_value
 
-            print(v)
-            print(dist_class)
-            print(dist_args)
             dist = dist_class(**dist_args)
 
             sd[v] = dist