diff --git a/doc/tutorials/error_analysis/error_analysis_part1.ipynb b/doc/tutorials/error_analysis/error_analysis_part1.ipynb
index a6ffd8be807..8e85c210993 100644
--- a/doc/tutorials/error_analysis/error_analysis_part1.ipynb
+++ b/doc/tutorials/error_analysis/error_analysis_part1.ipynb
@@ -38,6 +38,7 @@
    "source": [
     "import numpy as np\n",
     "import matplotlib.pyplot as plt\n",
+    "plt.rcParams.update({'font.size': 18})\n",
     "\n",
     "np.random.seed(43)\n",
     "\n",
@@ -68,9 +69,23 @@
     "time_series_2 = ar_1_process(N_SAMPLES, C_2, PHI_2, EPS_2)\n",
     "\n",
     "\n",
+    "print(\"Done\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "fig = plt.figure(figsize=(10, 6))\n",
     "plt.title(\"The first 1000 samples of both time series\")\n",
     "plt.plot(time_series_1[0:1000], label=\"time series 1\")\n",
     "plt.plot(time_series_2[0:1000], label=\"time series 2\")\n",
+    "plt.xlabel(\"$i$\")\n",
+    "plt.ylabel(\"$X_i$\")\n",
     "plt.legend()\n",
     "plt.show()"
    ]
@@ -146,8 +161,11 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "plt.plot(time_series_1[1000:1050],\"x\")\n",
-    "plt.ylim((7,20))\n",
+    "fig = plt.figure(figsize=(10, 6))\n",
+    "plt.plot(time_series_1[1000:1050], \"x\")\n",
+    "fig.axes[0].margins(y=0.1)\n",
+    "plt.xlabel(\"$i$\")\n",
+    "plt.ylabel(\"$X_i$\")\n",
     "plt.show()"
    ]
   },
@@ -166,8 +184,12 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "plt.plot(time_series_1[2000:42000:800],\"x\")\n",
-    "plt.ylim((7,20))\n",
+    "fig = plt.figure(figsize=(10, 6))\n",
+    "plt.plot(np.arange(2000, 42000, 800), time_series_1[2000:42000:800], \"x\")\n",
+    "fig.axes[0].margins(y=0.1)\n",
+    "plt.xlabel(\"$i$\")\n",
+    "plt.ylabel(\"$X_i$\")\n",
+    "fig.axes[0].xaxis.set_major_locator(plt.MultipleLocator(base=8000))\n",
     "plt.show()"
    ]
   },
@@ -337,8 +359,11 @@
     "for s in range(len(sizes)):\n",
     "    sems[s] = do_binning_analysis(time_series_1,sizes[s])\n",
     "\n",
+    "plt.figure(figsize=(10, 6))\n",
     "plt.plot(sizes,sems, \"x\")\n",
     "plt.xscale(\"log\")\n",
+    "plt.xlabel(\"$N_B$\")\n",
+    "plt.ylabel(\"SEM\")\n",
     "plt.show()\n",
     "```"
    ]
@@ -380,9 +405,12 @@
     "\n",
     "fit_sems = fit_fn(sizes,*fit_params)\n",
     "\n",
-    "plt.plot(sizes,sems[:CUTOFF], \"x\")\n",
-    "plt.plot(sizes,fit_sems,\"-\")\n",
+    "plt.figure(figsize=(10, 6))\n",
+    "plt.plot(sizes, sems[:CUTOFF], \"x\")\n",
+    "plt.plot(sizes, fit_sems, \"-\")\n",
     "plt.xscale(\"log\")\n",
+    "plt.xlabel(\"$N_B$\")\n",
+    "plt.ylabel(\"SEM\")\n",
     "plt.show()\n",
     "\n",
     "print(\"Final Standard Error of the Mean: {:.3f}\".format(fit_params[2]))"
@@ -412,8 +440,11 @@
     "for s in range(len(sizes)):\n",
     "    sems[s] = do_binning_analysis(time_series_2,sizes[s])\n",
     "\n",
+    "plt.figure(figsize=(10, 6))\n",
     "plt.plot(sizes,sems, \"x\")\n",
     "plt.xscale(\"log\")\n",
+    "plt.xlabel(\"$N_B$\")\n",
+    "plt.ylabel(\"SEM\")\n",
     "plt.show()"
    ]
   },
diff --git a/doc/tutorials/error_analysis/error_analysis_part2.ipynb b/doc/tutorials/error_analysis/error_analysis_part2.ipynb
index 61c20d42008..4a7b097c3ea 100644
--- a/doc/tutorials/error_analysis/error_analysis_part2.ipynb
+++ b/doc/tutorials/error_analysis/error_analysis_part2.ipynb
@@ -36,6 +36,7 @@
    "source": [
     "import numpy as np\n",
     "import matplotlib.pyplot as plt\n",
+    "plt.rcParams.update({'font.size': 18})\n",
     "\n",
     "np.random.seed(43)\n",
     "\n",
@@ -66,9 +67,21 @@
     "time_series_2 = ar_1_process(N_SAMPLES, C_2, PHI_2, EPS_2)\n",
     "\n",
     "\n",
+    "print(\"Done\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "fig = plt.figure(figsize=(10, 6))\n",
     "plt.title(\"The first 1000 samples of both time series\")\n",
     "plt.plot(time_series_1[0:1000], label=\"time series 1\")\n",
     "plt.plot(time_series_2[0:1000], label=\"time series 2\")\n",
+    "plt.xlabel(\"$i$\")\n",
+    "plt.ylabel(\"$X_i$\")\n",
     "plt.legend()\n",
     "plt.show()"
    ]
@@ -166,9 +179,10 @@
     "        temp += (time_series_1[i]-avg)*(time_series_1[i+j]-avg)\n",
     "    autocov[j] = temp / N_SAMPLES\n",
     "\n",
+    "fig = plt.figure(figsize=(10, 6))\n",
     "plt.plot(autocov)\n",
-    "plt.xlabel(\"lag time\")\n",
-    "plt.ylabel(\"auto-covariance\")\n",
+    "plt.xlabel(\"lag time $j$\")\n",
+    "plt.ylabel(r\"$\\\hat{R}^{XX}_j$\")\n",
     "plt.show()\n",
     "```"
    ]
@@ -203,10 +217,11 @@
     "    autocov[j] = np.dot(time_series_1_centered[:N_SAMPLES-j], time_series_1_centered[j:])\n",
     "autocov /= N_SAMPLES\n",
     "\n",
+    "fig = plt.figure(figsize=(10, 6))\n",
     "plt.gca().axhline(0, color=\"gray\",linewidth=1)\n",
     "plt.plot(autocov)\n",
-    "plt.xlabel(\"lag time\")\n",
-    "plt.ylabel(\"auto-covariance\")\n",
+    "plt.xlabel(\"lag time $j$\")\n",
+    "plt.ylabel(r\"$\\\hat{R}^{XX}_j$\")\n",
     "plt.show()"
    ]
   },
@@ -243,12 +258,13 @@
     "j = np.linspace(0,N_MAX-1,N_MAX)\n",
     "popt, pcov = curve_fit(exp_fnc, j, autocov[:N_MAX], p0=[15,10])\n",
     "\n",
-    "plt.plot(j,autocov[:N_MAX])\n",
+    "fig = plt.figure(figsize=(10, 6))\n",
+    "plt.plot(j,autocov[:N_MAX], 'x')\n",
     "plt.plot(j,exp_fnc(j,popt[0],popt[1]))\n",
     "plt.xlim((1,N_MAX))\n",
     "plt.xscale(\"log\")\n",
-    "plt.xlabel(\"lag time\")\n",
-    "plt.ylabel(\"auto-covariance\")\n",
+    "plt.xlabel(\"lag time $j$\")\n",
+    "plt.ylabel(r\"$\\\hat{R}^{XX}_j$\")\n",
     "plt.show()\n",
     "\n",
     "print(\"Exponential autocorrelation time: {:.2f} sampling intervals\".format(popt[1]))"
@@ -283,6 +299,7 @@
     "    tau_int_v[j_max] = 0.5 + np.sum(acf[1:j_max+1])\n",
     "\n",
     "# plot\n",
+    "fig = plt.figure(figsize=(10, 6))\n",
     "plt.plot(j_max_v,tau_int_v)\n",
     "plt.xscale(\"log\")\n",
     "plt.xlabel(r\"sum length $j_\\\mathrm{max}$\")\n",
@@ -317,6 +334,7 @@
     "    j_max += 1\n",
     "\n",
     "# plot\n",
+    "fig = plt.figure(figsize=(10, 6))\n",
     "plt.plot(j_max_v,C*tau_int_v)\n",
     "plt.plot(j_max_v, j_max_v)\n",
     "plt.plot([j_max],[j_max],\"ro\")\n",
@@ -419,13 +437,15 @@
     "    sem = np.sqrt(autocov[0]/N_eff)\n",
     "\n",
     "    # create ACF plot\n",
+    "    fig = plt.figure(figsize=(10, 6))\n",
     "    plt.gca().axhline(0, color=\"gray\",linewidth=1)\n",
     "    plt.plot(acf)\n",
-    "    plt.title(\"Autocorrelation function\")\n",
-    "    plt.xlabel(\"lag time\")\n",
+    "    plt.xlabel(\"lag time $j$\")\n",
+    "    plt.ylabel(r\"$\\\hat{R}^{XX}_j$\")\n",
     "    plt.show()\n",
     "    \n",
     "    # create integrated ACF plot\n",
+    "    fig = plt.figure(figsize=(10, 6))\n",
     "    plt.plot(j_max_v,C*tau_int_v)\n",
     "    plt.ylim(plt.gca().get_ylim()) # explicitly keep the limits of the first plot\n",
     "    plt.plot(j_max_v, j_max_v)\n",
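
Reviewer note: the part-1 hunks call `ar_1_process` and `do_binning_analysis`, which are defined in notebook cells this diff does not touch. For reading the hunks in isolation, here is a minimal sketch of what those helpers plausibly look like; the function bodies are assumptions, and treating the second argument of `do_binning_analysis` as the bin count is inferred from the new `$N_B$` axis label, not taken from the diff.

```python
import numpy as np

def ar_1_process(n_samples, c, phi, eps):
    """Sketch of an AR(1) generator: X_{i+1} = c + phi * X_i + noise,
    with Gaussian noise of standard deviation eps (assumed signature)."""
    ys = np.zeros(n_samples)
    ys[0] = c / (1 - phi)  # start at the stationary mean of the process
    for i in range(1, n_samples):
        ys[i] = c + phi * ys[i - 1] + np.random.normal(0.0, eps)
    return ys

def do_binning_analysis(data, n_bins):
    """Sketch of a binning analysis: average within n_bins equally sized
    bins, then return the SEM estimated from the bin averages."""
    bin_size = len(data) // n_bins
    trimmed = data[:n_bins * bin_size]  # drop samples that don't fill a bin
    bin_avgs = trimmed.reshape((n_bins, bin_size)).mean(axis=1)
    return np.std(bin_avgs, ddof=1) / np.sqrt(n_bins)
```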
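Similarly, the part-2 hunks reference `acf`, `tau_int_v`, `C`, `j_max`, and `N_eff` from surrounding cells. A condensed sketch of how these quantities fit together, using the definitions visible in the hunks (`tau_int = 0.5 + sum(acf[1:j_max+1])`, truncated at the first `j_max >= C * tau_int`, and `sem = sqrt(autocov[0] / N_eff)`); the `c_factor` default and the `max_lag` cap are illustrative choices, not taken from the diff.

```python
import numpy as np

def integrated_acf_sem(data, c_factor=5.0, max_lag=1000):
    """Sketch: SEM via the integrated autocorrelation time tau_int,
    truncating the ACF sum at the first j_max with j_max >= c_factor * tau_int."""
    n = len(data)
    centered = data - np.mean(data)
    # biased autocovariance estimator, as in the tutorial cells
    autocov = np.array([np.dot(centered[:n - j], centered[j:])
                        for j in range(max_lag)]) / n
    acf = autocov / autocov[0]  # normalized autocorrelation function
    tau_int = 0.5
    j_max = 0
    # grow the cutoff until it overtakes c_factor * tau_int(j_max),
    # i.e. the crossing point marked with the red dot in the plot hunk
    while c_factor * tau_int > j_max and j_max < max_lag - 1:
        j_max += 1
        tau_int += acf[j_max]
    n_eff = n / (2.0 * tau_int)          # effective number of independent samples
    return np.sqrt(autocov[0] / n_eff)   # matches sem = np.sqrt(autocov[0]/N_eff)
```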