diff --git a/notebooks/ChangeLiqConstr.ipynb b/notebooks/ChangeLiqConstr.ipynb index 23de8e76..22613966 100644 --- a/notebooks/ChangeLiqConstr.ipynb +++ b/notebooks/ChangeLiqConstr.ipynb @@ -18,7 +18,6 @@ "from HARK.ConsumptionSaving.ConsIndShockModel import *\n", "import HARK.ConsumptionSaving.ConsumerParameters as Params\n", "from HARK.utilities import plotFuncsDer, plotFuncs\n", - "from time import clock\n", "mystr = lambda number : \"{:.4f}\".format(number)\n", "\n", "import matplotlib.pyplot as plt" @@ -120,7 +119,7 @@ ], "metadata": { "jupytext": { - "cell_metadata_filter": "collapsed", + "cell_metadata_filter": "collapsed,code_folding", "formats": "ipynb,py" }, "kernelspec": { @@ -142,5 +141,5 @@ } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/notebooks/ChangeLiqConstr.py b/notebooks/ChangeLiqConstr.py index eccf11d4..753b9771 100644 --- a/notebooks/ChangeLiqConstr.py +++ b/notebooks/ChangeLiqConstr.py @@ -1,13 +1,13 @@ # --- # jupyter: # jupytext: -# cell_metadata_filter: collapsed +# cell_metadata_filter: collapsed,code_folding # formats: ipynb,py # text_representation: # extension: .py # format_name: light # format_version: '1.4' -# jupytext_version: 1.2.1 +# jupytext_version: 1.2.3 # kernelspec: # display_name: Python 3 # language: python @@ -22,7 +22,6 @@ from HARK.ConsumptionSaving.ConsIndShockModel import * import HARK.ConsumptionSaving.ConsumerParameters as Params from HARK.utilities import plotFuncsDer, plotFuncs -from time import clock mystr = lambda number : "{:.4f}".format(number) import matplotlib.pyplot as plt diff --git a/notebooks/Gentle-Intro-To-HARK-PerfForesightCRRA.ipynb b/notebooks/Gentle-Intro-To-HARK-PerfForesightCRRA.ipynb index cd5228a7..510b2032 100644 --- a/notebooks/Gentle-Intro-To-HARK-PerfForesightCRRA.ipynb +++ b/notebooks/Gentle-Intro-To-HARK-PerfForesightCRRA.ipynb @@ -35,7 +35,6 @@ "\n", "import numpy as np\n", "import HARK \n", - "from time import clock\n", "from copy import deepcopy\n", "mystr = lambda number : \"{:.4f}\".format(number)\n", "from HARK.utilities import plotFuncs" @@ -369,5 +368,5 @@ } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/notebooks/Gentle-Intro-To-HARK-PerfForesightCRRA.py b/notebooks/Gentle-Intro-To-HARK-PerfForesightCRRA.py index 826bd3d2..bce0b71a 100644 --- a/notebooks/Gentle-Intro-To-HARK-PerfForesightCRRA.py +++ b/notebooks/Gentle-Intro-To-HARK-PerfForesightCRRA.py @@ -6,7 +6,7 @@ # extension: .py # format_name: percent # format_version: '1.2' -# jupytext_version: 1.2.1 +# jupytext_version: 1.2.3 # kernelspec: # display_name: Python 3 # language: python @@ -35,7 +35,6 @@ import numpy as np import HARK -from time import clock from copy import deepcopy mystr = lambda number : "{:.4f}".format(number) from HARK.utilities import plotFuncs diff --git a/notebooks/Gentle-Intro-To-HARK.ipynb b/notebooks/Gentle-Intro-To-HARK.ipynb deleted file mode 100644 index a6123958..00000000 --- a/notebooks/Gentle-Intro-To-HARK.ipynb +++ /dev/null @@ -1,579 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# A Gentle Introduction to HARK\n", - "\n", - "This notebook provides a simple, hands-on tutorial for first time HARK users -- and potentially first time Python users. It does not go \"into the weeds\" - we have hidden some code cells that do boring things that you don't need to digest on your first experience with HARK. 
Our aim is to convey a feel for how the toolkit works.\n", - "\n", - "For readers for whom this is your very first experience with Python, we have put important Python concepts in **boldface**. For those for whom this is the first time they have used a Jupyter notebook, we have put Jupyter instructions in _italics_. Only cursory definitions (if any) are provided here. If you want to learn more, there are many online Python and Jupyter tutorials." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "code_folding": [] - }, - "outputs": [], - "source": [ - "# This cell has a bit of initial setup. You can click the triangle to the left to expand it.\n", - "# Click the \"Run\" button immediately above the notebook in order to execute the contents of any cell\n", - "# WARNING: Each cell in the notebook relies upon results generated by previous cells\n", - "# The most common problem beginners have is to execute a cell before all its predecessors\n", - "# If you do this, you can restart the kernel (see the \"Kernel\" menu above) and start over\n", - "%matplotlib inline\n", - "import matplotlib.pyplot as plt\n", - "from tqdm import tqdm\n", - "import numpy as np\n", - "import HARK \n", - "from time import clock\n", - "from copy import deepcopy\n", - "mystr = lambda number : \"{:.4f}\".format(number)\n", - "from HARK.utilities import plotFuncs" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Your First HARK Model: Perfect Foresight\n", - "\n", - "$$\\newcommand{\\CRRA}{\\rho}\\newcommand{\\DiscFac}{\\beta}$$\n", - "We start with almost the simplest possible consumption model: A consumer with CRRA utility \n", - "\n", - "\\begin{equation}\n", - "U(C) = \\frac{C^{1-\\CRRA}}{1-\\rho}\n", - "\\end{equation}\n", - "\n", - "has perfect foresight about everything except the (stochastic) date of death, which occurs with constant probability implying a \"survival probability\" $\\newcommand{\\LivPrb}{\\aleph}\\LivPrb < 1$. Permanent labor income $P_t$ grows from period to period by a factor $\\Gamma_t$. At the beginning of each period $t$, the consumer has some amount of market resources $M_t$ (which includes both market wealth and currrent income) and must choose how much of those resources to consume $C_t$ and how much to retain in a riskless asset $A_t$ which will earn return factor $R$. The agent's flow of utility $U(C_t)$ from consumption is geometrically discounted by factor $\\beta$. Between periods, the agent dies with probability $\\mathsf{D}_t$, ending his problem.\n", - "\n", - "The agent's problem can be written in Bellman form as:\n", - "\n", - "\\begin{eqnarray*}\n", - "V_t(M_t,P_t) &=& \\max_{C_t}~U(C_t) + \\beta \\aleph V_{t+1}(M_{t+1},P_{t+1}), \\\\\n", - "& s.t. & \\\\\n", - "%A_t &=& M_t - C_t, \\\\\n", - "M_{t+1} &=& R (M_{t}-C_{t}) + Y_{t+1}, \\\\\n", - "P_{t+1} &=& \\Gamma_{t+1} P_t, \\\\\n", - "\\end{eqnarray*}\n", - "\n", - "A particular perfect foresight agent's problem can be characterized by values of risk aversion $\\rho$, discount factor $\\beta$, and return factor $R$, along with sequences of income growth factors $\\{ \\Gamma_t \\}$ and survival probabilities $\\{\\mathsf{\\aleph}_t\\}$. 
To keep things simple, let's forget about \"sequences\" of income growth and mortality, and just think about an $\\textit{infinite horizon}$ consumer with constant income growth and survival probability.\n", - "\n", - "## Representing Agents in HARK\n", - "\n", - "HARK represents agents solving this type of problem as $\\textbf{instances}$ of the $\\textbf{class}$ $\\texttt{PerfForesightConsumerType}$, a $\\textbf{subclass}$ of $\\texttt{AgentType}$. To make agents of this class, we must import the class itself into our workspace. (Run the cell below in order to do this)." - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "from HARK.ConsumptionSaving.ConsIndShockModel import PerfForesightConsumerType" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The $\\texttt{PerfForesightConsumerType}$ class contains within itself the python code that constructs the solution for the perfect foresight model we are studying here, as specifically articulated in [these lecture notes](http://econ.jhu.edu/people/ccarroll/public/lecturenotes/consumption/PerfForesightCRRA/). \n", - "\n", - "To create an instance of $\\texttt{PerfForesightConsumerType}$, we simply call the class as if it were a function, passing as arguments the specific parameter values we want it to have. In the hidden cell below, we define a $\\textbf{dictionary}$ named $\\texttt{PF_dictionary}$ with these parameter values:\n", - "\n", - "| Param | Description | Code | Value |\n", - "| :---: | --- | --- | :---: |\n", - "| $\\rho$ | Relative risk aversion | $\\texttt{CRRA}$ | 2.5 |\n", - "| $\\beta$ | Discount factor | $\\texttt{DiscFac}$ | 0.96 |\n", - "| $R$ | Risk free interest factor | $\\texttt{Rfree}$ | 1.03 |\n", - "| $\\newcommand{\\LivFac}{\\aleph}\\LivFac$ | Survival probability | $\\texttt{LivPrb}$ | 0.98 |\n", - "| $\\Gamma$ | Income growth factor | $\\texttt{PermGroFac}$ | 1.01 |\n", - "\n", - "\n", - "For now, don't worry about the specifics of dictionaries. All you need to know is that a dictionary lets us pass many arguments wrapped up in one simple data structure." - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "code_folding": [] - }, - "outputs": [], - "source": [ - "# This cell defines a parameter dictionary. You can expand it if you want to see what that looks like.\n", - "PF_dictionary = {\n", - " 'CRRA' : 2.5,\n", - " 'DiscFac' : 0.96,\n", - " 'Rfree' : 1.03,\n", - " 'LivPrb' : [0.98],\n", - " 'PermGroFac' : [1.01],\n", - " 'T_cycle' : 1,\n", - " 'cycles' : 0,\n", - " 'AgentCount' : 10000\n", - "}\n", - "\n", - "# To those curious enough to open this hidden cell, you might notice that we defined\n", - "# a few extra parameters in that dictionary: T_cycle, cycles, and AgentCount. Don't\n", - "# worry about these for now." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Let's make an **object** named $\\texttt{PFexample}$ which is an **instance** of the $\\texttt{PerfForesightConsumerType}$ class. The object $\\texttt{PFexample}$ will bundle together the abstract mathematical description of the solution embodied in $\\texttt{PerfForesightConsumerType}$, and the specific set of parameter values defined in $\\texttt{PF_dictionary}$. 
Such a bundle is created passing $\\texttt{PF_dictionary}$ to the class $\\texttt{PerfForesightConsumerType}$:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "PFexample = PerfForesightConsumerType(**PF_dictionary) \n", - "# the asterisks ** basically say \"here come some arguments\" to PerfForesightConsumerType" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In $\\texttt{PFexample}$, we now have _defined_ the problem of a particular infinite horizon perfect foresight consumer who knows how to solve this problem. \n", - "\n", - "## Solving an Agent's Problem\n", - "\n", - "To tell the agent actually to solve the problem, we call the agent's $\\texttt{solve}$ **method**. (A *method** is essentially a function that an object runs that affects the object's own internal characteristics -- in this case, the method adds the consumption function to the contents of $\\texttt{PFexample}$.)\n", - "\n", - "The cell below calls the $\\texttt{solve}$ method for $\\texttt{PFexample}$" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "PFexample.solve()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Running the $\\texttt{solve}$ method creates the **attribute** of $\\texttt{PFexample}$ named $\\texttt{solution}$. In fact, every subclass of $\\texttt{AgentType}$ works the same way: The class definition contains the abstract algorithm that knows how to solve the model, but to obtain the particular solution for a specific instance (paramterization/configuration), that instance must be instructed to $\\texttt{solve()}$ its problem. \n", - "\n", - "The $\\texttt{solution}$ attribute is always a $\\textit{list}$ of solutions to a single period of the problem. In the case of an infinite horizon model like the one here, there is just one element in that list -- the solution to all periods of the infinite horizon problem. The consumption function stored as the first element (element 0) of the solution list can be retrieved by:" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "PFexample.solution[0].cFunc" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "One of the results proven in the associated [the lecture notes](http://econ.jhu.edu/people/ccarroll/public/lecturenotes/consumption/PerfForesightCRRA/) is that, for the specific problem defined above, there is a solution in which the _ratio_ $c = C/P$ is a linear function of the _ratio_ of market resources to permanent income, $m = M/P$. \n", - "\n", - "This is why $\\texttt{cFunc}$ can be represented by a linear interpolation. It can be plotted between an $m$ ratio of 0 and 10 using the command below." 
- ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD8CAYAAAB5Pm/hAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4xLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvDW2N/gAAIABJREFUeJzt3Xl8VPW9//HXlyVAwk4IS0IIexI2gQgo7uLCqlbrUkVt9Ufb69Vqbaul1aRQd0u11o26W63Xq7aGTUBEBXdAFCYLCfsSCGs2ss/n90dyr1wKkkCSM8v7+Xj4YDJzZs7bMXl78p1zPjgzQ0REQlczrwOIiEjjUtGLiIQ4Fb2ISIhT0YuIhDgVvYhIiFPRi4iEOBW9iEiIU9GLiIQ4Fb2ISIhr4dWOo6OjLSEhwavdi4gEpVWrVu01s671eY5nRZ+QkMDKlSu92r2ISFByzm2p73O0dCMiEuJU9CIiIU5FLyIS4o5b9M65Xs65Zc65TOeczzn3i6Nsc45zrsA5t6b2n3sbJ66IiNRXXT6MrQLuNLPVzrl2wCrn3BIzyzhiu+VmNrnhI4qIyMk47hG9meWZ2era20VAJhDb2MFERKRh1GuN3jmXAIwAvjjKw6c5575xzi10zg1ugGwiItIA6nwevXOuLfA2cLuZFR7x8Gqgt5kVO+cmAv8CBhzlNaYD0wHi4+NPOLSISLgxM+avzTuh59bpiN4515Kakn/NzN45SoBCMyuuvb0AaOmciz7KdnPMLMXMUrp2rdeFXSIiYStrVyHX/O1z/vP1r0/o+XU568YBzwOZZjb7GNt0r90O59zo2tfdd0KJREQEgILSStLSfUz6ywqydhXxx0uHnNDr1GXpZhwwDVjrnFtTe98MIB7AzJ4BrgB+7pyrAkqBq83MTiiRiEiY8/uN/161jYffy+bAoQp+NCaeOy8YRKeoCKadwOsdt+jNbAXgjrPNX4G/nsD+RUTkMGu2HST13XV8s72AlN6deHnqaIbEdjip1/RsqJmIiHxnb3E5D7+XxZsrtxPTrhV/vmo4l54SS+2q+ElR0YuIeKiy2s+rn23hz++vp7Simp+e1Zdbzx9A21YNV88qehERj3y6YS9p6T7W7y7mzAHRpE4ZTP+Ytg2+HxW9iEgT23GwlPvnZzJ/bR5xndrw7LRRXJjcrUGWaY5GRS8i0kTKKqt5bvlG/rosFzO4Y/xAfnp2X1q3bN6o+1XRi4g0MjNjaWY+M+dlsHX/ISYM6c7vJiUR1ymySfavohcRaUQb9xQzc14GH2bvoX9MW/5+0xjOGPBvgwMalYpeRKQRlJRX8cQHuTy/YiOtWjTn95OSuOH0BFo2b/q/70lFLyLSgMyM9G928sCCLHYVlnH5yDjumjCImHatPcukohcRaSCZeYWkpvv4ctN+hsZ24MlrRzKqdyevY6noRURO1sFDFcxesp6/f76FDm1a8sAPhnJlSi+aN2uc0yXrS0UvInKCqv3Gf321jUcWZVFQWsl1Y3vzywsG0jEywuto/4eKXkTkBKzacoC0dB9rdxQwOqEzaVMHk9yzvdexjkpFLyJSD/lFZTy0MJu3V2+nW/tWPH71KUwd3rPRrmptCCp6EZE6qKz28/Knm3ns/RzKq6r52dn9uPW8/kQ14PCxxhL4CUVEPLYiZy9pc33k5hdzzqCu3Ds5mb5dG374WGNR0YuIHMP2A4e4b34mC9ftIr5zJM9dn8L5STEBvUxzNCp6EZEjlFVW8+xHG3n6o1wAfnXhQG4+s/GHjzUWFb2ISC0zY3HGbmbNy2D7gVImDe3BjElJxHZs43W0k6KiFxEBNuwpJi3dx/KcvQzs1pbXbx7D6f2bdvhYY1HRi0hYKy6v4omlObzwySZat2jOvZOTmXZab0+GjzUWFb2IhCUz419rdvDAgizyi8q5MiWO31ycSHTbVl5Ha3AqehEJO+t2FJCW7mPllgMMj+vAs9NGMSLe++FjjUVFLyJh40BJBY8uzuYfX26lU2QED10+lB+O6kWzABk+1lhU9CIS8qr9xj++3Mqji7MpKqvi+tMSuOOCgXRo09LraE1CRS8iIW3l5v2kpvvw7SxkbN+a4WOJ3QNz+FhjUdGLSEjKLyzjgYVZ/PPrHfTo0JonrhnB5GE9gu6q1oagoheRkFJR5efFTzbxl6U5VFYbt5zbj1vO7U9kRPjWXfj+m4tIyPl4/R7S5vrYuKeE8xNjuGdyMgnRUV7H8pyKXkSC3rb9h5g1L4PFGbtJ6BLJizeeyrmJMV7HChgqehEJWqUV1Tz90Qae/WgDzZzjNxcP4qYz+tCqRXAOH2ssKnoRCTpmxiLfLmbNy2THwVKmDO/JjImJ9OgQ3MPHGouKXkSCSm5+EWnpGazI3Uti93a8MX0sY/t28TpWQFPRi0hQKCqr5PH3c3jp081ERjQnbUoy143tTYsQGj7WWFT0IhLQ/H7jna938ODCLPaVlHNVSi9+fdEguoTg8LHGoqIXkYC1dnsBqenrWL31IKf06sgLN6YwLK6j17GCjopeRALO/pIKHlmUzRtfbaVLVASPXDGMy0fGhfzwscaioheRgFFV7ef1L7fyp8XrKS6v4ifj+vCL8QNo3zo8ho81FhW9iASELzft595315G1q4jT+3UhbepgBnZr53WskHDconfO9QJeAboDfmCOmT1+jG1PBT4HrjKztxoyqIiEpl0FZdy/IJP0b3YS27ENT107kglDuofl8LHGUpcj+irgTjNb7ZxrB6xyzi0xs4zDN3LONQceAhY1Qk4RCTHlVdW8sGIzT3yQQ5XfuO28/vz8nP60idBVrQ3tuEVvZnlAXu3tIudcJhALZByx6a3A28CpDR1SRELLsux8Zs7NYNPeEi5I7sY9k5KJ7xLpdayQVa81eudcAjAC+OKI+2OBy4DzUNGLyDFs2VfCrHkZvJ+ZT9/oKF768amcM0jDxxpbnYveOdeWmiP2282s8IiHHwPuMrPq71tXc85NB6YDxMfH1z+tiASl0opqnvowl2c/3kjLZo67JyTyk3F9iGihq1qbgjOz42/kXEtgHrDIzGYf5fFNwP80fDRwCJhuZv861mumpKTYypUrTyi0iAQHM2PB2l3cNz+DnQVlXHpKT347MYlu7Vt7HS1oOedWmVlKfZ5Tl7NuHPA8kHm0kgcwsz6Hbf8SMO/7Sl5EQt/63UWkvuvjs437SOrRnseuHsHoPp29jhWW6rJ0Mw6YBqx1zq2pvW8GEA9gZs80UjYRCUIFpZU89v56XvlsC21btWDWJYP50ZjeNNdVrZ6py1k3K/huWea4zOzGkwkkIsHJ7zfeWrWdhxdlsa+kgmtGx/OrCwfROSrC62hhT1fGishJ+2bbQe5N9/HNtoOM6t2Jl348miGxHbyOJbVU9CJywvYWl/PIe9m8uW
[… several thousand characters of base64-encoded PNG plot output omitted …]
EWkaWroREQlxnhS9c+5i51y2cy7XOXe3FxkCgXOul3NumXMu0znnc879wutMXnLONXfOfe2cm+d1Fq855zo6595yzmXVfn+c5nUmrzjn7qj9+VjnnPuHc66115mainPuBedcvnNu3WH3dXbOLXHO5dT+2el4r9PkRe+caw48CUwAkoFrnHPJTZ0jQFQBd5pZEjAWuCWM3wuAXwCZXocIEI8D75lZIjCcMH1fnHOxwG1AipkNAZoDV3ubqkm9BFx8xH13A0vNbACwtPbr7+XFEf1oINfMNppZBfAGcIkHOTxnZnlmtrr2dhE1P8yx3qbyhnMuDpgEPOd1Fq8559oDZwHPA5hZhZkd9DaVp1oAbZxzLYBIYKfHeZqMmX0M7D/i7kuAl2tvvwxcerzX8aLoY4Fth329nTAtt8M55xKAEcAX3ibxzGPAbwC/10ECQF9gD/Bi7VLWc865KK9DecHMdgCPAluBPKDAzBZ7m8pz3cwsD2oOFoGY4z3Bi6J3R7kvrE/9cc61Bd4GbjezQq/zNDXn3GQg38xWeZ0lQLQARgJPm9kIoIQ6/HoeimrXny8B+gAP8OWGAAABWUlEQVQ9gSjn3HXepgo+XhT9dqDXYV/HEUa/ih3JOdeSmpJ/zcze8TqPR8YBU51zm6lZyjvPOfd3byN5ajuw3cz+57e7t6gp/nA0HthkZnvMrBJ4Bzjd40xe2+2c6wFQ+2f+8Z7gRdF/BQxwzvVxzkVQ88FKugc5POecc9Ssw2aa2Wyv83jFzH5rZnFmlkDN98MHZha2R21mtgvY5pwbVHvX+UCGh5G8tBUY65yLrP15OZ8w/WD6MOnADbW3bwDePd4TmnyomZlVOef+E1hEzSfoL5iZr6lzBIhxwDRgrXNuTe19M8xsgYeZJDDcCrxWezC0Efixx3k8YWZfOOfeAlZTc5ba14TRVbLOuX8A5wDRzrntQCrwIPCmc+4mav5H+MPjvo6ujBURCW26MlZEJMSp6EVEQpyKXkQkxKnoRURCnIpeRCTEqehFREKcil5EJMSp6EVEQtz/B+xSx1XvewGeAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "mPlotTop=10\n", - "plotFuncs(PFexample.solution[0].cFunc,0.,mPlotTop)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The figure illustrates one of the surprising features of the perfect foresight model: A person with zero money should be spending at a rate more than double their income (that is, $\\texttt{cFunc}(0.) \\approx 2.08$ - the intersection on the vertical axis). How can this be?\n", - "\n", - "The answer is that we have not incorporated any constraint that would prevent the agent from borrowing against the entire PDV of future earnings-- human wealth. How much is that? What's the minimum value of $m_t$ where the consumption function is defined? We can check by retrieving the $\\texttt{hNrm}$ **attribute** of the solution, which calculates the value of human wealth normalized by permanent income:" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "This agent's human wealth is 50.49994992551661 times his current income level.\n", - "This agent's consumption function is defined (consumption is positive) down to m_t = -50.49994992551661\n" - ] - } - ], - "source": [ - "humanWealth = PFexample.solution[0].hNrm\n", - "mMinimum = PFexample.solution[0].mNrmMin\n", - "print(\"This agent's human wealth is \" + str(humanWealth) + ' times his current income level.')\n", - "print(\"This agent's consumption function is defined (consumption is positive) down to m_t = \" + str(mMinimum))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Yikes! Let's take a look at the bottom of the consumption function. In the cell below, set the bounds of the $\\texttt{plotFuncs}$ function to display down to the lowest defined value of the consumption function." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# YOUR FIRST HANDS-ON EXERCISE!\n", - "# Fill in the value for \"mPlotBottom\" to plot the consumption function from the point where it is zero.\n", - "mPlotBottom = 0.0 # 0 is not the right number here - replace with the right answer (hint -- look at previous cell!)\n", - "plotFuncs(PFexample.solution[0].cFunc,mPlotBottom,mPlotTop)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Changing Agent Parameters\n", - "\n", - "Suppose you wanted to change one (or more) of the parameters of the agent's problem and see what that does. We want to compare consumption functions before and after we change parameters, so let's make a new instance of $\\texttt{PerfForesightConsumerType}$ by copying $\\texttt{PFexample}$." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "NewExample = deepcopy(PFexample)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In Python, you can set an **attribute** of an object just like any other variable. 
For example, we could make the new agent less patient:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "NewExample.DiscFac = 0.90\n", - "NewExample.solve()\n", - "mPlotBottom = mMinimum\n", - "plotFuncs([PFexample.solution[0].cFunc,NewExample.solution[0].cFunc],mPlotBottom,mPlotTop)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "(Note that you can pass a **list** of functions to $\\texttt{plotFuncs}$ as the first argument rather than just a single function. Lists are written inside of [square brackets].)\n", - "\n", - "Let's try to deal with the \"problem\" of massive human wealth by making another consumer who has essentially no future income. We can virtually eliminate human wealth by making the permanent income growth factor $\\textit{very}$ small.\n", - "\n", - "In $\\texttt{PFexample}$, the agent's income grew by 1 percent per period -- his $\\texttt{PermGroFac}$ took the value 1.01. What if our new agent had a growth factor of 0.01 -- his income __shrinks__ by 99 percent each period? In the cell below, set $\\texttt{NewExample}$'s discount factor back to its original value, then set its $\\texttt{PermGroFac}$ attribute so that the growth factor is 0.01 each period.\n", - "\n", - "Important: Recall that the model at the top of this document said that an agent's problem is characterized by a sequence of income growth factors, but we tabled that concept. Because $\\texttt{PerfForesightConsumerType}$ treats $\\texttt{PermGroFac}$ as a __time-varying__ attribute, it must be specified as a **list** (with a single element in this case)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Revert NewExample's discount factor and make his future income minuscule\n", - "# print(\"your lines here\")\n", - "\n", - "# Compare the old and new consumption functions\n", - "plotFuncs([PFexample.solution[0].cFunc,NewExample.solution[0].cFunc],0.,10.)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now $\\texttt{NewExample}$'s consumption function has the same slope (MPC) as $\\texttt{PFexample}$, but it emanates from (almost) zero-- he has basically no future income to borrow against!\n", - "\n", - "If you'd like, use the cell above to alter $\\texttt{NewExample}$'s other attributes (relative risk aversion, etc) and see how the consumption function changes. However, keep in mind that \\textit{no solution exists} for some combinations of parameters. HARK should let you know if this is the case if you try to solve such a model.\n", - "\n", - "\n", - "## Your Second HARK Model: Adding Income Shocks\n", - "\n", - "Linear consumption functions are pretty boring, and you'd be justified in feeling unimpressed if all HARK could do was plot some lines. Let's look at another model that adds two important layers of complexity: income shocks and (artificial) borrowing constraints.\n", - "\n", - "Specifically, our new type of consumer receives two income shocks at the beginning of each period: a completely transitory shock $\\theta_t$ and a completely permanent shock $\\psi_t$. Moreover, lenders will not let the agent borrow money such that his ratio of end-of-period assets $A_t$ to permanent income $P_t$ is less than $\\underline{a}$. As with the perfect foresight problem, this model can be framed in terms of __normalized__ variables, e.g. $m_t \\equiv M_t/P_t$. 
(See [here](http://econ.jhu.edu/people/ccarroll/papers/BufferStockTheory/) for all the theory).\n", - "\n", - "\\begin{eqnarray*}\n", - "v_t(m_t) &=& \\max_{c_t} ~ U(c_t) ~ + \\phantom{\\LivFac} \\beta \\mathbb{E} [(\\Gamma_{t+1}\\psi_{t+1})^{1-\\rho} v_{t+1}(m_{t+1}) ], \\\\\n", - "a_t &=& m_t - c_t, \\\\\n", - "a_t &\\geq& \\underset{\\bar{}}{a}, \\\\\n", - "m_{t+1} &=& R/(\\Gamma_{t+1} \\psi_{t+1}) a_t + \\theta_{t+1}, \\\\\n", - "\\mathbb{E}[\\psi]=\\mathbb{E}[\\theta] &=& 1, \\\\\n", - "u(c) &=& \\frac{c^{1-\\rho}}{1-\\rho}.\n", - "\\end{eqnarray*}\n", - "\n", - "HARK represents agents with this kind of problem as instances of the class $\\texttt{IndShockConsumerType}$. To create an $\\texttt{IndShockConsumerType}$, we must specify the same set of parameters as for a $\\texttt{PerfForesightConsumerType}$, as well as an artificial borrowing constraint $\\underline{a}$ and a sequence of income shocks. It's easy enough to pick a borrowing constraint -- say, zero -- but how would we specify the distributions of the shocks? Can't the joint distribution of permanent and transitory shocks be just about anything?\n", - "\n", - "_Yes_, and HARK can handle whatever correlation structure a user might care to specify. However, the default behavior of $\\texttt{IndShockConsumerType}$ is that the distribution of permanent income shocks is mean one lognormal, and the distribution of transitory shocks is mean one lognormal augmented with a point mass representing unemployment. The distributions are independent of each other by default, and by default are approximated with $N$ point equiprobable distributions.\n", - "\n", - "Let's make an infinite horizon instance of $\\texttt{IndShockConsumerType}$ with the same parameters as our original perfect foresight agent, plus the extra parameters to specify the income shock distribution and the artificial borrowing constraint. As before, we'll make a dictionary:\n", - "\n", - "\n", - "| Param | Description | Code | Value |\n", - "| :---: | --- | --- | :---: |\n", - "| \\underline{a} | Artificial borrowing constraint | $\\texttt{BoroCnstArt}$ | 0.0 |\n", - "| $\\sigma_\\psi$ | Underlying stdev of permanent income shocks | $\\texttt{PermShkStd}$ | 0.1 |\n", - "| $\\sigma_\\theta$ | Underlying stdev of transitory income shocks | $\\texttt{TranShkStd}$ | 0.1 |\n", - "| $N_\\psi$ | Number of discrete permanent income shocks | $\\texttt{PermShkCount}$ | 7 |\n", - "| $N_\\theta$ | Number of discrete transitory income shocks | $\\texttt{TranShkCount}$ | 7 |\n", - "| $\\mho$ | Unemployment probability | $\\texttt{UnempPrb}$ | 0.05 |\n", - "| $\\underset{\\bar{}}{\\theta}$ | Transitory shock when unemployed | $\\texttt{IncUnemp}$ | 0.3 |" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "code_folding": [ - 0 - ] - }, - "outputs": [], - "source": [ - "# This cell defines a parameter dictionary for making an instance of IndShockConsumerType.\n", - "\n", - "IndShockDictionary = {\n", - " 'CRRA': 2.5, # The dictionary includes our original parameters...\n", - " 'Rfree': 1.03,\n", - " 'DiscFac': 0.96,\n", - " 'LivPrb': [0.98],\n", - " 'PermGroFac': [1.01],\n", - " 'PermShkStd': [0.1], # ... and the new parameters for constructing the income process. 
\n", - " 'PermShkCount': 7,\n", - " 'TranShkStd': [0.1],\n", - " 'TranShkCount': 7,\n", - " 'UnempPrb': 0.05,\n", - " 'IncUnemp': 0.3,\n", - " 'BoroCnstArt': 0.0,\n", - " 'aXtraMin': 0.001, # aXtra parameters specify how to construct the grid of assets.\n", - " 'aXtraMax': 50., # Don't worry about these for now\n", - " 'aXtraNestFac': 3,\n", - " 'aXtraCount': 48,\n", - " 'aXtraExtra': [None],\n", - " 'vFuncBool': False, # These booleans indicate whether the value function should be calculated\n", - " 'CubicBool': False, # and whether to use cubic spline interpolation. You can ignore them.\n", - " 'aNrmInitMean' : -10.,\n", - " 'aNrmInitStd' : 0.0, # These parameters specify the (log) distribution of normalized assets\n", - " 'pLvlInitMean' : 0.0, # and permanent income for agents at \"birth\". They are only relevant in\n", - " 'pLvlInitStd' : 0.0, # simulation and you don't need to worry about them.\n", - " 'PermGroFacAgg' : 1.0,\n", - " 'T_retire': 0, # What's this about retirement? ConsIndShock is set up to be able to\n", - " 'UnempPrbRet': 0.0, # handle lifecycle models as well as infinite horizon problems. Swapping\n", - " 'IncUnempRet': 0.0, # out the structure of the income process is easy, but ignore for now.\n", - " 'T_age' : None,\n", - " 'T_cycle' : 1,\n", - " 'cycles' : 0,\n", - " 'AgentCount': 10000,\n", - " 'tax_rate':0.0,\n", - "}\n", - " \n", - "# Hey, there's a lot of parameters we didn't tell you about! Yes, but you don't need to\n", - "# think about them for now." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "As before, we need to import the relevant subclass of $\\texttt{AgentType}$ into our workspace, then create an instance by passing the dictionary to the class as if the class were a function." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from HARK.ConsumptionSaving.ConsIndShockModel import IndShockConsumerType\n", - "IndShockExample = IndShockConsumerType(**IndShockDictionary)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now we can solve our new agent's problem just like before, using the $\\texttt{solve}$ method." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "IndShockExample.solve()\n", - "plotFuncs(IndShockExample.solution[0].cFunc,0.,10.)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Changing Constructed Attributes\n", - "\n", - "In the parameter dictionary above, we chose values for HARK to use when constructing its numeric representation of $F_t$, the joint distribution of permanent and transitory income shocks. When $\\texttt{IndShockExample}$ was created, those parameters ($\\texttt{TranShkStd}$, etc) were used by the **constructor** or **initialization** method of $\\texttt{IndShockConsumerType}$ to construct an attribute called $\\texttt{IncomeDstn}$.\n", - "\n", - "Suppose you were interested in changing (say) the amount of permanent income risk. From the section above, you might think that you could simply change the attribute $\\texttt{TranShkStd}$, solve the model again, and it would work.\n", - "\n", - "That's _almost_ true-- there's one extra step. $\\texttt{TranShkStd}$ is a primitive input, but it's not the thing you _actually_ want to change. Changing $\\texttt{TranShkStd}$ doesn't actually update the income distribution... 
unless you tell it to (just like changing an agent's preferences does not change the consumption function that was stored for the old set of parameters -- until you invoke the $\\texttt{solve}$ method again). In the cell below, we invoke the method $\\texttt{updateIncomeProcess}$ so HARK knows to reconstruct the attribute $\\texttt{IncomeDstn}$." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "OtherExample = deepcopy(IndShockExample) # Make a copy so we can compare consumption functions\n", - "OtherExample.PermShkStd = [0.2] # Double permanent income risk (note that it's a one element list)\n", - "OtherExample.updateIncomeProcess() # Call the method to reconstruct the representation of F_t\n", - "OtherExample.solve()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In the cell below, use your blossoming HARK skills to plot the consumption function for $\\texttt{IndShockExample}$ and $\\texttt{OtherExample}$ on the same figure." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "lines_to_next_cell": 2 - }, - "outputs": [], - "source": [ - "# Use the line(s) below to plot the consumptions functions against each other" - ] - } - ], - "metadata": { - "jupytext": { - "cell_metadata_filter": "collapsed,code_folding", - "formats": "ipynb,py:percent" - }, - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.9" - }, - "latex_envs": { - "LaTeX_envs_menu_present": true, - "autoclose": false, - "autocomplete": true, - "bibliofile": "biblio.bib", - "cite_by": "apalike", - "current_citInitial": 1, - "eqLabelWithNumbers": true, - "eqNumInitial": 1, - "hotkeys": { - "equation": "Ctrl-E", - "itemize": "Ctrl-I" - }, - "labels_anchors": false, - "latex_user_defs": false, - "report_style_numbering": false, - "user_envs_cfg": false - }, - "varInspector": { - "cols": { - "lenName": 16, - "lenType": 16, - "lenVar": 40 - }, - "kernels_config": { - "python": { - "delete_cmd_postfix": "", - "delete_cmd_prefix": "del ", - "library": "var_list.py", - "varRefreshCmd": "print(var_dic_list())" - }, - "r": { - "delete_cmd_postfix": ") ", - "delete_cmd_prefix": "rm(", - "library": "var_list.r", - "varRefreshCmd": "cat(var_dic_list()) " - } - }, - "types_to_exclude": [ - "module", - "function", - "builtin_function_or_method", - "instance", - "_Feature" - ], - "window_display": false - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/notebooks/Gentle-Intro-To-HARK.py b/notebooks/Gentle-Intro-To-HARK.py deleted file mode 100644 index cf2de0e3..00000000 --- a/notebooks/Gentle-Intro-To-HARK.py +++ /dev/null @@ -1,306 +0,0 @@ -# --- -# jupyter: -# jupytext: -# cell_metadata_filter: collapsed,code_folding -# formats: ipynb,py:percent -# text_representation: -# extension: .py -# format_name: percent -# format_version: '1.2' -# jupytext_version: 1.2.1 -# kernelspec: -# display_name: Python 3 -# language: python -# name: python3 -# --- - -# %% [markdown] -# # A Gentle Introduction to HARK -# -# This notebook provides a simple, hands-on tutorial for first time HARK users -- and potentially first time Python users. 
It does not go "into the weeds" - we have hidden some code cells that do boring things that you don't need to digest on your first experience with HARK. Our aim is to convey a feel for how the toolkit works. -# -# For readers for whom this is your very first experience with Python, we have put important Python concepts in **boldface**. For those for whom this is the first time they have used a Jupyter notebook, we have put Jupyter instructions in _italics_. Only cursory definitions (if any) are provided here. If you want to learn more, there are many online Python and Jupyter tutorials. - -# %% {"code_folding": []} -# This cell has a bit of initial setup. You can click the triangle to the left to expand it. -# Click the "Run" button immediately above the notebook in order to execute the contents of any cell -# WARNING: Each cell in the notebook relies upon results generated by previous cells -# The most common problem beginners have is to execute a cell before all its predecessors -# If you do this, you can restart the kernel (see the "Kernel" menu above) and start over -# %matplotlib inline -import matplotlib.pyplot as plt -from tqdm import tqdm -import numpy as np -import HARK -from time import clock -from copy import deepcopy -mystr = lambda number : "{:.4f}".format(number) -from HARK.utilities import plotFuncs - -# %% [markdown] -# ## Your First HARK Model: Perfect Foresight -# -# $$\newcommand{\CRRA}{\rho}\newcommand{\DiscFac}{\beta}$$ -# We start with almost the simplest possible consumption model: A consumer with CRRA utility -# -# \begin{equation} -# U(C) = \frac{C^{1-\CRRA}}{1-\rho} -# \end{equation} -# -# has perfect foresight about everything except the (stochastic) date of death, which occurs with constant probability implying a "survival probability" $\newcommand{\LivPrb}{\aleph}\LivPrb < 1$. Permanent labor income $P_t$ grows from period to period by a factor $\Gamma_t$. At the beginning of each period $t$, the consumer has some amount of market resources $M_t$ (which includes both market wealth and currrent income) and must choose how much of those resources to consume $C_t$ and how much to retain in a riskless asset $A_t$ which will earn return factor $R$. The agent's flow of utility $U(C_t)$ from consumption is geometrically discounted by factor $\beta$. Between periods, the agent dies with probability $\mathsf{D}_t$, ending his problem. -# -# The agent's problem can be written in Bellman form as: -# -# \begin{eqnarray*} -# V_t(M_t,P_t) &=& \max_{C_t}~U(C_t) + \beta \aleph V_{t+1}(M_{t+1},P_{t+1}), \\ -# & s.t. & \\ -# %A_t &=& M_t - C_t, \\ -# M_{t+1} &=& R (M_{t}-C_{t}) + Y_{t+1}, \\ -# P_{t+1} &=& \Gamma_{t+1} P_t, \\ -# \end{eqnarray*} -# -# A particular perfect foresight agent's problem can be characterized by values of risk aversion $\rho$, discount factor $\beta$, and return factor $R$, along with sequences of income growth factors $\{ \Gamma_t \}$ and survival probabilities $\{\mathsf{\aleph}_t\}$. To keep things simple, let's forget about "sequences" of income growth and mortality, and just think about an $\textit{infinite horizon}$ consumer with constant income growth and survival probability. -# -# ## Representing Agents in HARK -# -# HARK represents agents solving this type of problem as $\textbf{instances}$ of the $\textbf{class}$ $\texttt{PerfForesightConsumerType}$, a $\textbf{subclass}$ of $\texttt{AgentType}$. To make agents of this class, we must import the class itself into our workspace. (Run the cell below in order to do this). 
- -# %% -from HARK.ConsumptionSaving.ConsIndShockModel import PerfForesightConsumerType - -# %% [markdown] -# The $\texttt{PerfForesightConsumerType}$ class contains within itself the python code that constructs the solution for the perfect foresight model we are studying here, as specifically articulated in [these lecture notes](http://econ.jhu.edu/people/ccarroll/public/lecturenotes/consumption/PerfForesightCRRA/). -# -# To create an instance of $\texttt{PerfForesightConsumerType}$, we simply call the class as if it were a function, passing as arguments the specific parameter values we want it to have. In the hidden cell below, we define a $\textbf{dictionary}$ named $\texttt{PF_dictionary}$ with these parameter values: -# -# | Param | Description | Code | Value | -# | :---: | --- | --- | :---: | -# | $\rho$ | Relative risk aversion | $\texttt{CRRA}$ | 2.5 | -# | $\beta$ | Discount factor | $\texttt{DiscFac}$ | 0.96 | -# | $R$ | Risk free interest factor | $\texttt{Rfree}$ | 1.03 | -# | $\newcommand{\LivFac}{\aleph}\LivFac$ | Survival probability | $\texttt{LivPrb}$ | 0.98 | -# | $\Gamma$ | Income growth factor | $\texttt{PermGroFac}$ | 1.01 | -# -# -# For now, don't worry about the specifics of dictionaries. All you need to know is that a dictionary lets us pass many arguments wrapped up in one simple data structure. - -# %% {"code_folding": []} -# This cell defines a parameter dictionary. You can expand it if you want to see what that looks like. -PF_dictionary = { - 'CRRA' : 2.5, - 'DiscFac' : 0.96, - 'Rfree' : 1.03, - 'LivPrb' : [0.98], - 'PermGroFac' : [1.01], - 'T_cycle' : 1, - 'cycles' : 0, - 'AgentCount' : 10000 -} - -# To those curious enough to open this hidden cell, you might notice that we defined -# a few extra parameters in that dictionary: T_cycle, cycles, and AgentCount. Don't -# worry about these for now. - -# %% [markdown] -# Let's make an **object** named $\texttt{PFexample}$ which is an **instance** of the $\texttt{PerfForesightConsumerType}$ class. The object $\texttt{PFexample}$ will bundle together the abstract mathematical description of the solution embodied in $\texttt{PerfForesightConsumerType}$, and the specific set of parameter values defined in $\texttt{PF_dictionary}$. Such a bundle is created passing $\texttt{PF_dictionary}$ to the class $\texttt{PerfForesightConsumerType}$: - -# %% -PFexample = PerfForesightConsumerType(**PF_dictionary) -# the asterisks ** basically say "here come some arguments" to PerfForesightConsumerType - -# %% [markdown] -# In $\texttt{PFexample}$, we now have _defined_ the problem of a particular infinite horizon perfect foresight consumer who knows how to solve this problem. -# -# ## Solving an Agent's Problem -# -# To tell the agent actually to solve the problem, we call the agent's $\texttt{solve}$ **method**. (A *method** is essentially a function that an object runs that affects the object's own internal characteristics -- in this case, the method adds the consumption function to the contents of $\texttt{PFexample}$.) -# -# The cell below calls the $\texttt{solve}$ method for $\texttt{PFexample}$ - -# %% -PFexample.solve() - -# %% [markdown] -# Running the $\texttt{solve}$ method creates the **attribute** of $\texttt{PFexample}$ named $\texttt{solution}$. 
In fact, every subclass of $\texttt{AgentType}$ works the same way: The class definition contains the abstract algorithm that knows how to solve the model, but to obtain the particular solution for a specific instance (paramterization/configuration), that instance must be instructed to $\texttt{solve()}$ its problem. -# -# The $\texttt{solution}$ attribute is always a $\textit{list}$ of solutions to a single period of the problem. In the case of an infinite horizon model like the one here, there is just one element in that list -- the solution to all periods of the infinite horizon problem. The consumption function stored as the first element (element 0) of the solution list can be retrieved by: - -# %% -PFexample.solution[0].cFunc - -# %% [markdown] -# One of the results proven in the associated [the lecture notes](http://econ.jhu.edu/people/ccarroll/public/lecturenotes/consumption/PerfForesightCRRA/) is that, for the specific problem defined above, there is a solution in which the _ratio_ $c = C/P$ is a linear function of the _ratio_ of market resources to permanent income, $m = M/P$. -# -# This is why $\texttt{cFunc}$ can be represented by a linear interpolation. It can be plotted between an $m$ ratio of 0 and 10 using the command below. - -# %% -mPlotTop=10 -plotFuncs(PFexample.solution[0].cFunc,0.,mPlotTop) - -# %% [markdown] -# The figure illustrates one of the surprising features of the perfect foresight model: A person with zero money should be spending at a rate more than double their income (that is, $\texttt{cFunc}(0.) \approx 2.08$ - the intersection on the vertical axis). How can this be? -# -# The answer is that we have not incorporated any constraint that would prevent the agent from borrowing against the entire PDV of future earnings-- human wealth. How much is that? What's the minimum value of $m_t$ where the consumption function is defined? We can check by retrieving the $\texttt{hNrm}$ **attribute** of the solution, which calculates the value of human wealth normalized by permanent income: - -# %% -humanWealth = PFexample.solution[0].hNrm -mMinimum = PFexample.solution[0].mNrmMin -print("This agent's human wealth is " + str(humanWealth) + ' times his current income level.') -print("This agent's consumption function is defined (consumption is positive) down to m_t = " + str(mMinimum)) - -# %% [markdown] -# Yikes! Let's take a look at the bottom of the consumption function. In the cell below, set the bounds of the $\texttt{plotFuncs}$ function to display down to the lowest defined value of the consumption function. - -# %% -# YOUR FIRST HANDS-ON EXERCISE! -# Fill in the value for "mPlotBottom" to plot the consumption function from the point where it is zero. -mPlotBottom = 0.0 # 0 is not the right number here - replace with the right answer (hint -- look at previous cell!) -plotFuncs(PFexample.solution[0].cFunc,mPlotBottom,mPlotTop) - -# %% [markdown] -# ## Changing Agent Parameters -# -# Suppose you wanted to change one (or more) of the parameters of the agent's problem and see what that does. We want to compare consumption functions before and after we change parameters, so let's make a new instance of $\texttt{PerfForesightConsumerType}$ by copying $\texttt{PFexample}$. - -# %% -NewExample = deepcopy(PFexample) - -# %% [markdown] -# In Python, you can set an **attribute** of an object just like any other variable. 
For example, we could make the new agent less patient: - -# %% -NewExample.DiscFac = 0.90 -NewExample.solve() -mPlotBottom = mMinimum -plotFuncs([PFexample.solution[0].cFunc,NewExample.solution[0].cFunc],mPlotBottom,mPlotTop) - -# %% [markdown] -# (Note that you can pass a **list** of functions to $\texttt{plotFuncs}$ as the first argument rather than just a single function. Lists are written inside of [square brackets].) -# -# Let's try to deal with the "problem" of massive human wealth by making another consumer who has essentially no future income. We can virtually eliminate human wealth by making the permanent income growth factor $\textit{very}$ small. -# -# In $\texttt{PFexample}$, the agent's income grew by 1 percent per period -- his $\texttt{PermGroFac}$ took the value 1.01. What if our new agent had a growth factor of 0.01 -- his income __shrinks__ by 99 percent each period? In the cell below, set $\texttt{NewExample}$'s discount factor back to its original value, then set its $\texttt{PermGroFac}$ attribute so that the growth factor is 0.01 each period. -# -# Important: Recall that the model at the top of this document said that an agent's problem is characterized by a sequence of income growth factors, but we tabled that concept. Because $\texttt{PerfForesightConsumerType}$ treats $\texttt{PermGroFac}$ as a __time-varying__ attribute, it must be specified as a **list** (with a single element in this case). - -# %% -# Revert NewExample's discount factor and make his future income minuscule -# print("your lines here") - -# Compare the old and new consumption functions -plotFuncs([PFexample.solution[0].cFunc,NewExample.solution[0].cFunc],0.,10.) - -# %% [markdown] -# Now $\texttt{NewExample}$'s consumption function has the same slope (MPC) as $\texttt{PFexample}$, but it emanates from (almost) zero-- he has basically no future income to borrow against! -# -# If you'd like, use the cell above to alter $\texttt{NewExample}$'s other attributes (relative risk aversion, etc) and see how the consumption function changes. However, keep in mind that \textit{no solution exists} for some combinations of parameters. HARK should let you know if this is the case if you try to solve such a model. -# -# -# ## Your Second HARK Model: Adding Income Shocks -# -# Linear consumption functions are pretty boring, and you'd be justified in feeling unimpressed if all HARK could do was plot some lines. Let's look at another model that adds two important layers of complexity: income shocks and (artificial) borrowing constraints. -# -# Specifically, our new type of consumer receives two income shocks at the beginning of each period: a completely transitory shock $\theta_t$ and a completely permanent shock $\psi_t$. Moreover, lenders will not let the agent borrow money such that his ratio of end-of-period assets $A_t$ to permanent income $P_t$ is less than $\underline{a}$. As with the perfect foresight problem, this model can be framed in terms of __normalized__ variables, e.g. $m_t \equiv M_t/P_t$. (See [here](http://econ.jhu.edu/people/ccarroll/papers/BufferStockTheory/) for all the theory). -# -# \begin{eqnarray*} -# v_t(m_t) &=& \max_{c_t} ~ U(c_t) ~ + \phantom{\LivFac} \beta \mathbb{E} [(\Gamma_{t+1}\psi_{t+1})^{1-\rho} v_{t+1}(m_{t+1}) ], \\ -# a_t &=& m_t - c_t, \\ -# a_t &\geq& \underset{\bar{}}{a}, \\ -# m_{t+1} &=& R/(\Gamma_{t+1} \psi_{t+1}) a_t + \theta_{t+1}, \\ -# \mathbb{E}[\psi]=\mathbb{E}[\theta] &=& 1, \\ -# u(c) &=& \frac{c^{1-\rho}}{1-\rho}. 
-# \end{eqnarray*} -# -# HARK represents agents with this kind of problem as instances of the class $\texttt{IndShockConsumerType}$. To create an $\texttt{IndShockConsumerType}$, we must specify the same set of parameters as for a $\texttt{PerfForesightConsumerType}$, as well as an artificial borrowing constraint $\underline{a}$ and a sequence of income shocks. It's easy enough to pick a borrowing constraint -- say, zero -- but how would we specify the distributions of the shocks? Can't the joint distribution of permanent and transitory shocks be just about anything? -# -# _Yes_, and HARK can handle whatever correlation structure a user might care to specify. However, the default behavior of $\texttt{IndShockConsumerType}$ is that the distribution of permanent income shocks is mean one lognormal, and the distribution of transitory shocks is mean one lognormal augmented with a point mass representing unemployment. The distributions are independent of each other by default, and by default are approximated with $N$ point equiprobable distributions. -# -# Let's make an infinite horizon instance of $\texttt{IndShockConsumerType}$ with the same parameters as our original perfect foresight agent, plus the extra parameters to specify the income shock distribution and the artificial borrowing constraint. As before, we'll make a dictionary: -# -# -# | Param | Description | Code | Value | -# | :---: | --- | --- | :---: | -# | \underline{a} | Artificial borrowing constraint | $\texttt{BoroCnstArt}$ | 0.0 | -# | $\sigma_\psi$ | Underlying stdev of permanent income shocks | $\texttt{PermShkStd}$ | 0.1 | -# | $\sigma_\theta$ | Underlying stdev of transitory income shocks | $\texttt{TranShkStd}$ | 0.1 | -# | $N_\psi$ | Number of discrete permanent income shocks | $\texttt{PermShkCount}$ | 7 | -# | $N_\theta$ | Number of discrete transitory income shocks | $\texttt{TranShkCount}$ | 7 | -# | $\mho$ | Unemployment probability | $\texttt{UnempPrb}$ | 0.05 | -# | $\underset{\bar{}}{\theta}$ | Transitory shock when unemployed | $\texttt{IncUnemp}$ | 0.3 | - -# %% {"code_folding": [0]} -# This cell defines a parameter dictionary for making an instance of IndShockConsumerType. - -IndShockDictionary = { - 'CRRA': 2.5, # The dictionary includes our original parameters... - 'Rfree': 1.03, - 'DiscFac': 0.96, - 'LivPrb': [0.98], - 'PermGroFac': [1.01], - 'PermShkStd': [0.1], # ... and the new parameters for constructing the income process. - 'PermShkCount': 7, - 'TranShkStd': [0.1], - 'TranShkCount': 7, - 'UnempPrb': 0.05, - 'IncUnemp': 0.3, - 'BoroCnstArt': 0.0, - 'aXtraMin': 0.001, # aXtra parameters specify how to construct the grid of assets. - 'aXtraMax': 50., # Don't worry about these for now - 'aXtraNestFac': 3, - 'aXtraCount': 48, - 'aXtraExtra': [None], - 'vFuncBool': False, # These booleans indicate whether the value function should be calculated - 'CubicBool': False, # and whether to use cubic spline interpolation. You can ignore them. - 'aNrmInitMean' : -10., - 'aNrmInitStd' : 0.0, # These parameters specify the (log) distribution of normalized assets - 'pLvlInitMean' : 0.0, # and permanent income for agents at "birth". They are only relevant in - 'pLvlInitStd' : 0.0, # simulation and you don't need to worry about them. - 'PermGroFacAgg' : 1.0, - 'T_retire': 0, # What's this about retirement? ConsIndShock is set up to be able to - 'UnempPrbRet': 0.0, # handle lifecycle models as well as infinite horizon problems. 
Swapping - 'IncUnempRet': 0.0, # out the structure of the income process is easy, but ignore for now. - 'T_age' : None, - 'T_cycle' : 1, - 'cycles' : 0, - 'AgentCount': 10000, - 'tax_rate':0.0, -} - -# Hey, there's a lot of parameters we didn't tell you about! Yes, but you don't need to -# think about them for now. - -# %% [markdown] -# As before, we need to import the relevant subclass of $\texttt{AgentType}$ into our workspace, then create an instance by passing the dictionary to the class as if the class were a function. - -# %% -from HARK.ConsumptionSaving.ConsIndShockModel import IndShockConsumerType -IndShockExample = IndShockConsumerType(**IndShockDictionary) - -# %% [markdown] -# Now we can solve our new agent's problem just like before, using the $\texttt{solve}$ method. - -# %% -IndShockExample.solve() -plotFuncs(IndShockExample.solution[0].cFunc,0.,10.) - -# %% [markdown] -# ## Changing Constructed Attributes -# -# In the parameter dictionary above, we chose values for HARK to use when constructing its numeric representation of $F_t$, the joint distribution of permanent and transitory income shocks. When $\texttt{IndShockExample}$ was created, those parameters ($\texttt{TranShkStd}$, etc) were used by the **constructor** or **initialization** method of $\texttt{IndShockConsumerType}$ to construct an attribute called $\texttt{IncomeDstn}$. -# -# Suppose you were interested in changing (say) the amount of permanent income risk. From the section above, you might think that you could simply change the attribute $\texttt{TranShkStd}$, solve the model again, and it would work. -# -# That's _almost_ true-- there's one extra step. $\texttt{TranShkStd}$ is a primitive input, but it's not the thing you _actually_ want to change. Changing $\texttt{TranShkStd}$ doesn't actually update the income distribution... unless you tell it to (just like changing an agent's preferences does not change the consumption function that was stored for the old set of parameters -- until you invoke the $\texttt{solve}$ method again). In the cell below, we invoke the method $\texttt{updateIncomeProcess}$ so HARK knows to reconstruct the attribute $\texttt{IncomeDstn}$. - -# %% -OtherExample = deepcopy(IndShockExample) # Make a copy so we can compare consumption functions -OtherExample.PermShkStd = [0.2] # Double permanent income risk (note that it's a one element list) -OtherExample.updateIncomeProcess() # Call the method to reconstruct the representation of F_t -OtherExample.solve() - -# %% [markdown] -# In the cell below, use your blossoming HARK skills to plot the consumption function for $\texttt{IndShockExample}$ and $\texttt{OtherExample}$ on the same figure. 
- -# %% -# Use the line(s) below to plot the consumptions functions against each other - diff --git a/notebooks/IncExpectationExample.ipynb b/notebooks/IncExpectationExample.ipynb index d2828f8c..96db14fc 100644 --- a/notebooks/IncExpectationExample.ipynb +++ b/notebooks/IncExpectationExample.ipynb @@ -31,11 +31,10 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": { - "code_folding": [ - 0 - ] + "code_folding": [], + "lines_to_next_cell": 1 }, "outputs": [], "source": [ @@ -52,7 +51,6 @@ "import HARK.ConsumptionSaving.ConsumerParameters as Params\n", "\n", "from HARK.utilities import approxUniform, getLorenzShares, calcSubpopAvg\n", - "from time import clock\n", "mystr = lambda number : \"{:.4f}\".format(number)" ] }, @@ -61,9 +59,9 @@ "execution_count": 2, "metadata": { "code_folding": [ - 1, - 2 - ] + 1 + ], + "lines_to_next_cell": 1 }, "outputs": [], "source": [ @@ -85,8 +83,9 @@ "execution_count": 3, "metadata": { "code_folding": [ - 0 - ] + 1 + ], + "lines_to_next_cell": 1 }, "outputs": [], "source": [ @@ -137,8 +136,7 @@ "metadata": { "code_folding": [ 1 - ], - "lines_to_next_cell": 2 + ] }, "outputs": [], "source": [ @@ -249,9 +247,7 @@ "cell_type": "code", "execution_count": 5, "metadata": { - "code_folding": [ - 0 - ], + "code_folding": [], "lines_to_next_cell": 2 }, "outputs": [ @@ -359,16 +355,8 @@ } }, "jupytext": { - "formats": "ipynb,py:percent", - "metadata_filter": { - "cells": "collapsed" - }, - "text_representation": { - "extension": ".py", - "format_name": "percent", - "format_version": "1.1", - "jupytext_version": "0.8.3" - } + "cell_metadata_filter": "collapsed,code_folding", + "formats": "ipynb,py:percent" }, "kernelspec": { "display_name": "Python 3", @@ -418,5 +406,5 @@ } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/notebooks/IncExpectationExample.py b/notebooks/IncExpectationExample.py index 635fb42e..7d4f8af3 100644 --- a/notebooks/IncExpectationExample.py +++ b/notebooks/IncExpectationExample.py @@ -1,13 +1,13 @@ # --- # jupyter: # jupytext: -# cell_metadata_filter: collapsed +# cell_metadata_filter: collapsed,code_folding # formats: ipynb,py:percent # text_representation: # extension: .py # format_name: percent # format_version: '1.2' -# jupytext_version: 1.1.3 +# jupytext_version: 1.2.3 # kernelspec: # display_name: Python 3 # language: python @@ -46,7 +46,6 @@ import HARK.ConsumptionSaving.ConsumerParameters as Params from HARK.utilities import approxUniform, getLorenzShares, calcSubpopAvg -from time import clock mystr = lambda number : "{:.4f}".format(number) # %% {"code_folding": [1]} diff --git a/notebooks/IndShockConsumerType.ipynb b/notebooks/IndShockConsumerType.ipynb index 868c9f5b..0797dd65 100644 --- a/notebooks/IndShockConsumerType.ipynb +++ b/notebooks/IndShockConsumerType.ipynb @@ -21,7 +21,6 @@ "# Initial imports and notebook setup, click arrow to show\n", "from HARK.ConsumptionSaving.ConsIndShockModel import IndShockConsumerType\n", "from HARK.utilities import plotFuncsDer, plotFuncs\n", - "from time import clock\n", "import matplotlib.pyplot as plt\n", "import numpy as np\n", "mystr = lambda number : \"{:.4f}\".format(number)" @@ -641,7 +640,7 @@ } }, "jupytext": { - "cell_metadata_filter": "collapsed,code_folding", + "cell_metadata_filter": "collapsed,code_folding,heading_collapsed,hidden", "formats": "ipynb,py:percent" }, "kernelspec": { @@ -692,5 +691,5 @@ } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/notebooks/IndShockConsumerType.py 
b/notebooks/IndShockConsumerType.py index 39965b59..a184caa5 100644 --- a/notebooks/IndShockConsumerType.py +++ b/notebooks/IndShockConsumerType.py @@ -1,13 +1,13 @@ # --- # jupyter: # jupytext: -# cell_metadata_filter: collapsed,code_folding +# cell_metadata_filter: collapsed,code_folding,heading_collapsed,hidden # formats: ipynb,py:percent # text_representation: # extension: .py # format_name: percent # format_version: '1.2' -# jupytext_version: 1.1.3 +# jupytext_version: 1.2.3 # kernelspec: # display_name: Python 3 # language: python @@ -22,7 +22,6 @@ # Initial imports and notebook setup, click arrow to show from HARK.ConsumptionSaving.ConsIndShockModel import IndShockConsumerType from HARK.utilities import plotFuncsDer, plotFuncs -from time import clock import matplotlib.pyplot as plt import numpy as np mystr = lambda number : "{:.4f}".format(number) diff --git a/notebooks/KinkedRconsumerType.ipynb b/notebooks/KinkedRconsumerType.ipynb index cc57b6c4..78dae10f 100644 --- a/notebooks/KinkedRconsumerType.ipynb +++ b/notebooks/KinkedRconsumerType.ipynb @@ -20,7 +20,6 @@ "# Initial imports and notebook setup, click arrow to show\n", "from HARK.ConsumptionSaving.ConsIndShockModel import KinkedRconsumerType\n", "from HARK.utilities import plotFuncsDer, plotFuncs\n", - "from time import clock\n", "import matplotlib.pyplot as plt\n", "import numpy as np\n", "mystr = lambda number : \"{:.4f}\".format(number)" @@ -365,5 +364,5 @@ } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/notebooks/KinkedRconsumerType.py b/notebooks/KinkedRconsumerType.py new file mode 100644 index 00000000..0348a982 --- /dev/null +++ b/notebooks/KinkedRconsumerType.py @@ -0,0 +1,227 @@ +# --- +# jupyter: +# jupytext: +# cell_metadata_filter: collapsed,code_folding +# formats: ipynb,py:percent +# text_representation: +# extension: .py +# format_name: percent +# format_version: '1.2' +# jupytext_version: 1.2.3 +# kernelspec: +# display_name: Python 3 +# language: python +# name: python3 +# --- + +# %% [markdown] +# # KinkedRconsumerType: Consumption-saving model with idiosyncratic income shocks and different interest rates on borrowing and saving + +# %% {"code_folding": [0]} +# Initial imports and notebook setup, click arrow to show +from HARK.ConsumptionSaving.ConsIndShockModel import KinkedRconsumerType +from HARK.utilities import plotFuncsDer, plotFuncs +import matplotlib.pyplot as plt +import numpy as np +mystr = lambda number : "{:.4f}".format(number) + +# %% [markdown] +# The module $\texttt{HARK.ConsumptionSaving.ConsIndShockModel}$ concerns consumption-saving models with idiosyncratic shocks to (non-capital) income. All of the models assume CRRA utility with geometric discounting, no bequest motive, and income shocks are fully transitory or fully permanent. +# +# $\texttt{ConsIndShockModel}$ currently includes three models: +# 1. A very basic "perfect foresight" model with no uncertainty. +# 2. A model with risk over transitory and permanent income shocks. +# 3. The model described in (2), with an interest rate for debt that differs from the interest rate for savings. +# +# This notebook provides documentation for the third of these models. 
+# $\newcommand{\CRRA}{\rho}$
+# $\newcommand{\DiePrb}{\mathsf{D}}$
+# $\newcommand{\PermGroFac}{\Gamma}$
+# $\newcommand{\Rfree}{\mathsf{R}}$
+# $\newcommand{\DiscFac}{\beta}$
+
+# %% [markdown]
+# ## Statement of the "kinked R" model
+#
+# Consider a small extension to the model faced by $\texttt{IndShockConsumerType}$s: the interest rate on borrowing ($a_t < 0$) is greater than the interest rate on saving ($a_t > 0$). Consumers who face this kind of problem are represented by the $\texttt{KinkedRconsumerType}$ class.
+#
+# For a full theoretical treatment, this model is analyzed in [A Theory of the Consumption Function, With
+# and Without Liquidity Constraints](http://www.econ2.jhu.edu/people/ccarroll/ATheoryv3JEP.pdf)
+# and its [expanded edition](http://www.econ2.jhu.edu/people/ccarroll/ATheoryv3NBER.pdf).
+#
+# Continuing to work with *normalized* variables (e.g. $m_t$ represents the level of market resources divided by permanent income), the "kinked R" model can be stated as:
+#
+# \begin{eqnarray*}
+# v_t(m_t) &=& \max_{c_t} {~} U(c_t) + \DiscFac (1-\DiePrb_{t+1}) \mathbb{E}_{t} \left[ (\PermGroFac_{t+1}\psi_{t+1})^{1-\CRRA} v_{t+1}(m_{t+1}) \right], \\
+# a_t &=& m_t - c_t, \\
+# a_t &\geq& \underline{a}, \\
+# m_{t+1} &=& \Rfree_t/(\PermGroFac_{t+1} \psi_{t+1}) a_t + \theta_{t+1}, \\
+# \Rfree_t &=& \begin{cases}
+# \Rfree_{boro} & \text{if } a_t < 0 \\
+# \Rfree_{save} & \text{if } a_t \geq 0
+# \end{cases}, \\
+# \Rfree_{boro} &>& \Rfree_{save}, \\
+# (\psi_{t+1},\theta_{t+1}) &\sim& F_{t+1}, \\
+# \mathbb{E}[\psi]=\mathbb{E}[\theta] &=& 1.
+# \end{eqnarray*}
+
+# %% [markdown]
+# ## Solving the "kinked R" model
+#
+# The solution method for the "kinked R" model is nearly identical to that of the $\texttt{IndShockConsumerType}$ on which it is based, using the endogenous grid method; see the notebook for that model for more information. The only significant difference is that the interest factor varies with $a_t$ across the exogenously chosen grid of end-of-period assets, with a discontinuity in $\Rfree$ at $a_t=0$.
+#
+# To correctly handle this, the $\texttt{solveConsKinkedR}$ function inserts *two* instances of $a_t=0$ into the grid of $a_t$ values: one corresponding to $\Rfree_{boro}$ ($a_t = -0$) and the other corresponding to $\Rfree_{save}$ ($a_t = +0$). The two consumption levels (and corresponding endogenous $m_t$ gridpoints) represent points at which the agent's first order condition is satisfied at *exactly* $a_t=0$ at the two different interest factors. In between these two points, the first order condition *does not hold with equality*: the consumer ends the period with exactly $a_t=0$, consuming $c_t=m_t$, because his marginal utility of consumption exceeds the marginal value of saving but is less than the marginal value of borrowing. This generates a consumption function with *two* kinks: two concave portions (for borrowing and for saving) joined by a linear segment of slope 1 in between.
+
+# %% [markdown]
+# ## Example parameter values to construct an instance of KinkedRconsumerType
+#
+# The parameters required to create an instance of $\texttt{KinkedRconsumerType}$ are nearly identical to those for $\texttt{IndShockConsumerType}$. The only difference is that the parameter $\texttt{Rfree}$ is replaced with $\texttt{Rboro}$ and $\texttt{Rsave}$.
+#
+# While the parameter $\texttt{CubicBool}$ is required to create a valid $\texttt{KinkedRconsumerType}$ instance, it must be set to $\texttt{False}$; cubic spline interpolation has not yet been implemented for this model. In the future, this restriction will be lifted.
+#
+# | Parameter | Description | Code | Example value | Time-varying? |
+# | :---: | --- | --- | --- | --- |
+# | $\DiscFac$ | Intertemporal discount factor | $\texttt{DiscFac}$ | $0.96$ | |
+# | $\CRRA$ | Coefficient of relative risk aversion | $\texttt{CRRA}$ | $2.0$ | |
+# | $\Rfree_{boro}$ | Risk free interest factor for borrowing | $\texttt{Rboro}$ | $1.20$ | |
+# | $\Rfree_{save}$ | Risk free interest factor for saving | $\texttt{Rsave}$ | $1.01$ | |
+# | $1 - \DiePrb_{t+1}$ | Survival probability | $\texttt{LivPrb}$ | $[0.98]$ | $\surd$ |
+# | $\PermGroFac_{t+1}$ | Permanent income growth factor | $\texttt{PermGroFac}$ | $[1.01]$ | $\surd$ |
+# | $\sigma_\psi$ | Standard deviation of log permanent income shocks | $\texttt{PermShkStd}$ | $[0.1]$ | $\surd$ |
+# | $N_\psi$ | Number of discrete permanent income shocks | $\texttt{PermShkCount}$ | $7$ | |
+# | $\sigma_\theta$ | Standard deviation of log transitory income shocks | $\texttt{TranShkStd}$ | $[0.2]$ | $\surd$ |
+# | $N_\theta$ | Number of discrete transitory income shocks | $\texttt{TranShkCount}$ | $7$ | |
+# | $\mho$ | Probability of being unemployed and getting $\theta=\underline{\theta}$ | $\texttt{UnempPrb}$ | $0.05$ | |
+# | $\underline{\theta}$ | Transitory shock when unemployed | $\texttt{IncUnemp}$ | $0.3$ | |
+# | $\mho^{Ret}$ | Probability of being "unemployed" when retired | $\texttt{UnempPrbRet}$ | $0.0005$ | |
+# | $\underline{\theta}^{Ret}$ | Transitory shock when "unemployed" and retired | $\texttt{IncUnempRet}$ | $0.0$ | |
+# | $(none)$ | Period of the lifecycle model when retirement begins | $\texttt{T_retire}$ | $0$ | |
+# | $(none)$ | Minimum value in assets-above-minimum grid | $\texttt{aXtraMin}$ | $0.001$ | |
+# | $(none)$ | Maximum value in assets-above-minimum grid | $\texttt{aXtraMax}$ | $20.0$ | |
+# | $(none)$ | Number of points in base assets-above-minimum grid | $\texttt{aXtraCount}$ | $48$ | |
+# | $(none)$ | Exponential nesting factor for base assets-above-minimum grid | $\texttt{aXtraNestFac}$ | $3$ | |
+# | $(none)$ | Additional values to add to assets-above-minimum grid | $\texttt{aXtraExtra}$ | $None$ | |
+# | $\underline{a}$ | Artificial borrowing constraint (normalized) | $\texttt{BoroCnstArt}$ | $None$ | |
+# | $(none)$ | Indicator for whether $\texttt{vFunc}$ should be computed | $\texttt{vFuncBool}$ | $True$ | |
+# | $(none)$ | Indicator for whether $\texttt{cFunc}$ should use cubic splines | $\texttt{CubicBool}$ | $False$ | |
+# | $T$ | Number of periods in this type's "cycle" | $\texttt{T_cycle}$ | $1$ | |
+# | $(none)$ | Number of times the "cycle" occurs | $\texttt{cycles}$ | $0$ | |
+#
+# These example parameters are almost identical to those used for $\texttt{IndShockExample}$ in the prior notebook, except that the interest rate on borrowing is 20% (like a credit card), and the interest rate on saving is 1%. Moreover, the artificial borrowing constraint has been set to $\texttt{None}$. The cell below defines a parameter dictionary with these example values.
+
+# %% {"code_folding": [0]}
+KinkedRdict={ # Click the arrow to expand this parameter dictionary
+    # Parameters shared with the perfect foresight model
+    "CRRA" : 2.0,                          # Coefficient of relative risk aversion
+    "DiscFac": 0.96,                       # Intertemporal discount factor
+    "LivPrb" : [0.98],                     # Survival probability
+    "PermGroFac" :[1.01],                  # Permanent income growth factor
+
+    # New parameters unique to the "kinked R" model
+    "Rboro" : 1.20,                        # Interest factor on borrowing (a < 0)
+    "Rsave" : 1.01,                        # Interest factor on saving (a > 0)
+
+    # Parameters that specify the income distribution over the lifecycle
+    "PermShkStd" : [0.1],                  # Standard deviation of log permanent shocks to income
+    "PermShkCount" : 7,                    # Number of points in discrete approximation to permanent income shocks
+    "TranShkStd" : [0.2],                  # Standard deviation of log transitory shocks to income
+    "TranShkCount" : 7,                    # Number of points in discrete approximation to transitory income shocks
+    "UnempPrb" : 0.05,                     # Probability of unemployment while working
+    "IncUnemp" : 0.3,                      # Unemployment benefits replacement rate
+    "UnempPrbRet" : 0.0005,                # Probability of "unemployment" while retired
+    "IncUnempRet" : 0.0,                   # "Unemployment" benefits when retired
+    "T_retire" : 0,                        # Period of retirement (0 --> no retirement)
+    "tax_rate" : 0.0,                      # Flat income tax rate (legacy parameter, will be removed in future)
+
+    # Parameters for constructing the "assets above minimum" grid
+    "aXtraMin" : 0.001,                    # Minimum end-of-period "assets above minimum" value
+    "aXtraMax" : 20,                       # Maximum end-of-period "assets above minimum" value
+    "aXtraCount" : 48,                     # Number of points in the base grid of "assets above minimum"
+    "aXtraNestFac" : 3,                    # Exponential nesting factor when constructing "assets above minimum" grid
+    "aXtraExtra" : [None],                 # Additional values to add to aXtraGrid
+
+    # A few other parameters
+    "BoroCnstArt" : None,                  # Artificial borrowing constraint; imposed minimum level of end-of-period assets
+    "vFuncBool" : True,                    # Whether to calculate the value function during solution
+    "CubicBool" : False,                   # Cubic spline interpolation is not yet implemented for this model, so this must be False
+    "T_cycle" : 1,                         # Number of periods in the cycle for this agent type
+
+    # Parameters only used in simulation
+    "AgentCount" : 10000,                  # Number of agents of this type
+    "T_sim" : 500,                         # Number of periods to simulate
+    "aNrmInitMean" : -6.0,                 # Mean of log initial assets
+    "aNrmInitStd" : 1.0,                   # Standard deviation of log initial assets
+    "pLvlInitMean" : 0.0,                  # Mean of log initial permanent income
+    "pLvlInitStd" : 0.0,                   # Standard deviation of log initial permanent income
+    "PermGroFacAgg" : 1.0,                 # Aggregate permanent income growth factor
+    "T_age" : None,                        # Age after which simulated agents are automatically killed
+}
+
+# %% [markdown]
+# ## Solving and examining the solution of the "kinked R" model
+#
+# The cell below creates an infinite horizon instance of $\texttt{KinkedRconsumerType}$ and solves its model by calling its $\texttt{solve}$ method.
+
+# %%
+KinkyExample = KinkedRconsumerType(**KinkedRdict)
+KinkyExample.cycles = 0 # Make the example infinite horizon
+KinkyExample.solve()
+
+# %% [markdown]
+# An element of a $\texttt{KinkedRconsumerType}$'s solution will have all the same attributes as that of an $\texttt{IndShockConsumerType}$; see that notebook for details.
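+#
+# As a quick check, the cell below prints a few of those attributes directly. This is a minimal illustrative sketch: $\texttt{mNrmMin}$ is used again in the plotting cell below, while $\texttt{MPCmin}$ and $\texttt{MPCmax}$ are assumed to be present on the solution object, as in the other ConsIndShockModel notebooks.
+
+# %%
+# Inspect selected attributes of the period-0 solution
+# (MPCmin and MPCmax are assumed attribute names; mNrmMin appears in the plots below)
+print('Lower bound of normalized market resources (mNrmMin):', KinkyExample.solution[0].mNrmMin)
+print('Limiting MPC as m approaches mNrmMin (MPCmax):', KinkyExample.solution[0].MPCmax)
+print('Limiting MPC as m approaches infinity (MPCmin):', KinkyExample.solution[0].MPCmin)
+
+# %% [markdown]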
+#
+# We can plot the consumption function of our "kinked R" example, as well as the MPC:
+
+# %%
+print('Kinked R consumption function:')
+plotFuncs(KinkyExample.solution[0].cFunc,KinkyExample.solution[0].mNrmMin,5)
+
+print('Kinked R marginal propensity to consume:')
+plotFuncsDer(KinkyExample.solution[0].cFunc,KinkyExample.solution[0].mNrmMin,5)
+
+# %% [markdown]
+# ## Simulating the "kinked R" model
+#
+# In order to generate simulated data, an instance of $\texttt{KinkedRconsumerType}$ needs to know how many agents there are that share these particular parameters (and are thus *ex ante* homogeneous), the distribution of states for newly "born" agents, and how many periods to simulate. These simulation parameters are described in the table below, along with example values.
+#
+# | Description | Code | Example value |
+# | :---: | --- | --- |
+# | Number of consumers of this type | $\texttt{AgentCount}$ | $10000$ |
+# | Number of periods to simulate | $\texttt{T_sim}$ | $500$ |
+# | Mean of initial log (normalized) assets | $\texttt{aNrmInitMean}$ | $-6.0$ |
+# | Stdev of initial log (normalized) assets | $\texttt{aNrmInitStd}$ | $1.0$ |
+# | Mean of initial log permanent income | $\texttt{pLvlInitMean}$ | $0.0$ |
+# | Stdev of initial log permanent income | $\texttt{pLvlInitStd}$ | $0.0$ |
+# | Aggregate productivity growth factor | $\texttt{PermGroFacAgg}$ | $1.0$ |
+# | Age after which consumers are automatically killed | $\texttt{T_age}$ | $None$ |
+#
+# Here, we will simulate 10,000 consumers for 500 periods. All newly born agents will start with permanent income of exactly $P_t = 1.0 = \exp(\texttt{pLvlInitMean})$, as $\texttt{pLvlInitStd}$ has been set to zero; and because $\texttt{aNrmInitMean}$ is $-6.0$, they will be born with essentially zero assets, less than $1\%$ of permanent income.
+#
+# These example parameter values were already passed as part of the parameter dictionary that we used to create $\texttt{KinkyExample}$, so it is ready to simulate. We need to set the $\texttt{track_vars}$ attribute to indicate the variables for which we want to record a *history*.
+
+# %%
+KinkyExample.track_vars = ['mNrmNow','cNrmNow','pLvlNow']
+KinkyExample.initializeSim()
+KinkyExample.simulate()
+
+# %% [markdown]
+# We can plot the average (normalized) market resources in each simulated period:
+
+# %%
+plt.plot(np.mean(KinkyExample.mNrmNow_hist,axis=1))
+plt.xlabel('Time')
+plt.ylabel('Mean market resources')
+plt.show()
+
+# %% [markdown]
+# Now let's plot the distribution of (normalized) assets $a_t$ for the current population, after simulating for $500$ periods; this should be fairly close to the long run distribution:
+
+# %%
+plt.plot(np.sort(KinkyExample.aNrmNow),np.linspace(0.,1.,KinkyExample.AgentCount))
+plt.xlabel('End-of-period assets')
+plt.ylabel('Cumulative distribution')
+plt.ylim(-0.01,1.01)
+plt.show()
+
+# %% [markdown]
+# We can see there's a significant point mass of consumers with *exactly* $a_t=0$; these are consumers who do not find it worthwhile to give up a bit of consumption to begin saving (because $\Rfree_{save}$ is too low), and also are not willing to finance additional consumption by borrowing (because $\Rfree_{boro}$ is too high).
+#
+# The smaller point masses in this distribution are due to $\texttt{HARK}$ drawing simulated income shocks from the discretized distribution, rather than the "true" lognormal distributions of shocks.
For consumers who ended $t-1$ with $a_{t-1}=0$ in assets, there are only 8 values the transitory shock $\theta_{t}$ can take on, and thus only 8 values of $m_t$ thus $a_t$ they can achieve; the value of $\psi_t$ is immaterial to $m_t$ when $a_{t-1}=0$. You can verify this by changing $\texttt{TranShkCount}$ to some higher value, like 25, in the dictionary above, then running the subsequent cells; the smaller point masses will not be visible to the naked eye. diff --git a/notebooks/Micro-and-Macro-Implications-of-Very-Impatient-HHs.ipynb b/notebooks/Micro-and-Macro-Implications-of-Very-Impatient-HHs.ipynb index fbf80a14..2b329f99 100644 --- a/notebooks/Micro-and-Macro-Implications-of-Very-Impatient-HHs.ipynb +++ b/notebooks/Micro-and-Macro-Implications-of-Very-Impatient-HHs.ipynb @@ -63,7 +63,6 @@ "# Import related generic python packages\n", "\n", "# Set how many digits past the decimal point should be printed?\n", - "from time import clock\n", "mystr = lambda number : \"{:.4f}\".format(number)\n", "decfmt4 = lambda number : \"{:.4f}\".format(number)\n", "decfmt3 = lambda number : \"{:.3f}\".format(number)\n", diff --git a/notebooks/Micro-and-Macro-Implications-of-Very-Impatient-HHs.py b/notebooks/Micro-and-Macro-Implications-of-Very-Impatient-HHs.py index 3d55cc38..7aa92068 100644 --- a/notebooks/Micro-and-Macro-Implications-of-Very-Impatient-HHs.py +++ b/notebooks/Micro-and-Macro-Implications-of-Very-Impatient-HHs.py @@ -7,7 +7,7 @@ # extension: .py # format_name: percent # format_version: '1.2' -# jupytext_version: 1.2.1 +# jupytext_version: 1.2.3 # kernelspec: # display_name: Python 3 # language: python @@ -61,7 +61,6 @@ # Import related generic python packages # Set how many digits past the decimal point should be printed? -from time import clock mystr = lambda number : "{:.4f}".format(number) decfmt4 = lambda number : "{:.4f}".format(number) decfmt3 = lambda number : "{:.3f}".format(number) diff --git a/notebooks/PerfForesightConsumerType.ipynb b/notebooks/PerfForesightConsumerType.ipynb index 25bfddfa..f390a439 100644 --- a/notebooks/PerfForesightConsumerType.ipynb +++ b/notebooks/PerfForesightConsumerType.ipynb @@ -9,7 +9,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": { "code_folding": [ 0 @@ -20,7 +20,6 @@ "# Initial imports and notebook setup, click arrow to show\n", "from HARK.ConsumptionSaving.ConsIndShockModel import PerfForesightConsumerType\n", "from HARK.utilities import plotFuncs\n", - "from time import clock\n", "import matplotlib.pyplot as plt\n", "import numpy as np\n", "mystr = lambda number : \"{:.4f}\".format(number)" @@ -672,5 +671,5 @@ } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/notebooks/PerfForesightConsumerType.py b/notebooks/PerfForesightConsumerType.py index 3b7eeea6..2f3b9c91 100644 --- a/notebooks/PerfForesightConsumerType.py +++ b/notebooks/PerfForesightConsumerType.py @@ -7,7 +7,7 @@ # extension: .py # format_name: percent # format_version: '1.2' -# jupytext_version: 1.2.1 +# jupytext_version: 1.2.3 # kernelspec: # display_name: Python 3 # language: python @@ -21,7 +21,6 @@ # Initial imports and notebook setup, click arrow to show from HARK.ConsumptionSaving.ConsIndShockModel import PerfForesightConsumerType from HARK.utilities import plotFuncs -from time import clock import matplotlib.pyplot as plt import numpy as np mystr = lambda number : "{:.4f}".format(number) diff --git a/notebooks/Structural-Estimates-From-Empirical-MPCs-Fagereng-et-al.ipynb 
b/notebooks/Structural-Estimates-From-Empirical-MPCs-Fagereng-et-al.ipynb index bb2b76ba..e51e4394 100644 --- a/notebooks/Structural-Estimates-From-Empirical-MPCs-Fagereng-et-al.ipynb +++ b/notebooks/Structural-Estimates-From-Empirical-MPCs-Fagereng-et-al.ipynb @@ -36,8 +36,7 @@ "import os\n", "\n", "import numpy as np\n", - "from copy import deepcopy\n", - "from time import clock" + "from copy import deepcopy" ] }, { @@ -357,5 +356,5 @@ } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/notebooks/Structural-Estimates-From-Empirical-MPCs-Fagereng-et-al.py b/notebooks/Structural-Estimates-From-Empirical-MPCs-Fagereng-et-al.py index 30791e74..9f8a6c08 100644 --- a/notebooks/Structural-Estimates-From-Empirical-MPCs-Fagereng-et-al.py +++ b/notebooks/Structural-Estimates-From-Empirical-MPCs-Fagereng-et-al.py @@ -38,7 +38,6 @@ import numpy as np from copy import deepcopy -from time import clock # %% {"code_folding": [0]} # Import needed tools from HARK diff --git a/notebooks/TractableBufferStock-Interactive.ipynb b/notebooks/TractableBufferStock-Interactive.ipynb index f959d74e..1c446a9e 100644 --- a/notebooks/TractableBufferStock-Interactive.ipynb +++ b/notebooks/TractableBufferStock-Interactive.ipynb @@ -18,7 +18,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": { "code_folding": [ 0 @@ -32,7 +32,6 @@ "\n", "import numpy as np\n", "import HARK \n", - "from time import clock\n", "from copy import deepcopy\n", "mystr = lambda number : \"{:.3f}\".format(number)\n", "\n", @@ -132,7 +131,8 @@ "code_folding": [ 0, 3 - ] + ], + "lines_to_next_cell": 3 }, "outputs": [ { @@ -317,8 +317,7 @@ " plot_emp = plot_emp_widget,\n", " plot_ret = plot_ret_widget,\n", " plot_mSS = plot_mSS_widget,\n", - " );\n", - "\n" + " );" ] } ], diff --git a/notebooks/TractableBufferStock-Interactive.py b/notebooks/TractableBufferStock-Interactive.py index a6f95bed..7f56437e 100644 --- a/notebooks/TractableBufferStock-Interactive.py +++ b/notebooks/TractableBufferStock-Interactive.py @@ -8,7 +8,7 @@ # extension: .py # format_name: percent # format_version: '1.2' -# jupytext_version: 1.2.1 +# jupytext_version: 1.2.3 # kernel_info: # name: python3 # kernelspec: @@ -32,7 +32,6 @@ import numpy as np import HARK -from time import clock from copy import deepcopy mystr = lambda number : "{:.3f}".format(number) diff --git a/notebooks/Uncertainty-and-the-Saving-Rate.ipynb b/notebooks/Uncertainty-and-the-Saving-Rate.ipynb index c5a1150c..48589728 100644 --- a/notebooks/Uncertainty-and-the-Saving-Rate.ipynb +++ b/notebooks/Uncertainty-and-the-Saving-Rate.ipynb @@ -108,7 +108,7 @@ "\n", "from copy import deepcopy\n", "from scipy.optimize import golden, brentq\n", - "from time import clock\n", + "from time import time\n", "import numpy as np\n", "import scipy as sp" ] @@ -238,7 +238,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": { "code_folding": [ 0 @@ -274,13 +274,13 @@ " center_range = param_range,\n", " spread = spread,\n", " dist_type = Params.dist_type) # Distribution of DiscFac\n", - " t_start = clock()\n", + " t_start = time()\n", " \n", " spread_estimate = golden(paramDistObjective \n", " ,brack=spread_range\n", " ,tol=1e-4) \n", " center_estimate = EstimationEconomy.center_save\n", - " t_end = clock()\n", + " t_end = time()\n", " else: # Run the param-point estimation only\n", " paramPointObjective = lambda center : cstwMPC.getKYratioDifference(Economy = EstimationEconomy,\n", " param_name = 
Params.param_name,\n", @@ -288,12 +288,12 @@ " center = center,\n", " spread = 0.0,\n", " dist_type = Params.dist_type)\n", - " t_start = clock()\n", + " t_start = time()\n", " center_estimate = brentq(paramPointObjective # Find best point estimate \n", " ,param_range[0]\n", " ,param_range[1],xtol=1e-6)\n", " spread_estimate = 0.0\n", - " t_end = clock()\n", + " t_end = time()\n", " \n", " print(spread_estimate)\n", " print('****************')\n", @@ -754,5 +754,5 @@ } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/notebooks/Uncertainty-and-the-Saving-Rate.py b/notebooks/Uncertainty-and-the-Saving-Rate.py index 01e42a88..931cb426 100644 --- a/notebooks/Uncertainty-and-the-Saving-Rate.py +++ b/notebooks/Uncertainty-and-the-Saving-Rate.py @@ -107,7 +107,7 @@ def in_ipynb(): from copy import deepcopy from scipy.optimize import golden, brentq -from time import clock +from time import time import numpy as np import scipy as sp @@ -211,13 +211,13 @@ def in_ipynb(): center_range = param_range, spread = spread, dist_type = Params.dist_type) # Distribution of DiscFac - t_start = clock() + t_start = time() spread_estimate = golden(paramDistObjective ,brack=spread_range ,tol=1e-4) center_estimate = EstimationEconomy.center_save - t_end = clock() + t_end = time() else: # Run the param-point estimation only paramPointObjective = lambda center : cstwMPC.getKYratioDifference(Economy = EstimationEconomy, param_name = Params.param_name, @@ -225,12 +225,12 @@ def in_ipynb(): center = center, spread = 0.0, dist_type = Params.dist_type) - t_start = clock() + t_start = time() center_estimate = brentq(paramPointObjective # Find best point estimate ,param_range[0] ,param_range[1],xtol=1e-6) spread_estimate = 0.0 - t_end = clock() + t_end = time() print(spread_estimate) print('****************')