diff --git a/notebooks/02-rainnet.ipynb b/notebooks/02-rainnet.ipynb index 3c52c90..7db15ee 100644 --- a/notebooks/02-rainnet.ipynb +++ b/notebooks/02-rainnet.ipynb @@ -4,44 +4,15 @@ "cell_type": "code", "execution_count": 1, "metadata": {}, - "outputs": [ - { - "data": { - "application/javascript": [ - "\n", - " setTimeout(function() {\n", - " var nbb_cell_id = 1;\n", - " var nbb_unformatted_code = \"%reload_ext autoreload\\n%autoreload 2\\n%reload_ext nb_black\";\n", - " var nbb_formatted_code = \"%reload_ext autoreload\\n%autoreload 2\\n%reload_ext nb_black\";\n", - " var nbb_cells = Jupyter.notebook.get_cells();\n", - " for (var i = 0; i < nbb_cells.length; ++i) {\n", - " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", - " if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n", - " nbb_cells[i].set_text(nbb_formatted_code);\n", - " }\n", - " break;\n", - " }\n", - " }\n", - " }, 500);\n", - " " - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ - "%reload_ext autoreload\n", - "%autoreload 2\n", - "%reload_ext nb_black" + "import sys\n", + "sys.path.insert(0, \"../src\")" ] }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 2, "metadata": {}, "outputs": [ { @@ -49,9 +20,9 @@ "application/javascript": [ "\n", " setTimeout(function() {\n", - " var nbb_cell_id = 8;\n", - " var nbb_unformatted_code = \"import sys\\nsys.path.insert(0, \\\"../src\\\")\";\n", - " var nbb_formatted_code = \"import sys\\n\\nsys.path.insert(0, \\\"../src\\\")\";\n", + " var nbb_cell_id = 2;\n", + " var nbb_unformatted_code = \"%reload_ext autoreload\\n%autoreload 2\\n%reload_ext nb_black\";\n", + " var nbb_formatted_code = \"%reload_ext autoreload\\n%autoreload 2\\n%reload_ext nb_black\";\n", " var nbb_cells = Jupyter.notebook.get_cells();\n", " for (var i = 0; i < nbb_cells.length; ++i) {\n", " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", @@ -73,14 +44,14 @@ } ], "source": [ - "import sys\n", - "\n", - "sys.path.insert(0, \"../src\")" + "%reload_ext autoreload\n", + "%autoreload 2\n", + "%reload_ext nb_black" ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 3, "metadata": {}, "outputs": [ { @@ -88,9 +59,9 @@ "application/javascript": [ "\n", " setTimeout(function() {\n", - " var nbb_cell_id = 9;\n", - " var nbb_unformatted_code = \"import gc\\nimport functools\\nimport sys\\nfrom pathlib import Path\\nfrom concurrent.futures import ThreadPoolExecutor\\nfrom tqdm.notebook import tqdm\\n\\nimport cv2\\nimport numpy as np\\nimport pandas as pd\\nimport matplotlib.pyplot as plt\\n\\nimport torch\\nimport torch.nn as nn\\nimport torch.nn.functional as F\\nimport pytorch_lightning as pl\\n\\nfrom data import NowcastingDataset\\nfrom loss import LogCoshLoss\";\n", - " var nbb_formatted_code = \"import gc\\nimport functools\\nimport sys\\nfrom pathlib import Path\\nfrom concurrent.futures import ThreadPoolExecutor\\nfrom tqdm.notebook import tqdm\\n\\nimport cv2\\nimport numpy as np\\nimport pandas as pd\\nimport matplotlib.pyplot as plt\\n\\nimport torch\\nimport torch.nn as nn\\nimport torch.nn.functional as F\\nimport pytorch_lightning as pl\\n\\nfrom data import NowcastingDataset\\nfrom loss import LogCoshLoss\";\n", + " var nbb_cell_id = 3;\n", + " var nbb_unformatted_code = \"import gc\\nimport functools\\nfrom pathlib import Path\\nfrom concurrent.futures import ThreadPoolExecutor\\nfrom tqdm.notebook import tqdm\\n\\nimport cv2\\nimport numpy as np\\nimport pandas as 
pd\\nimport matplotlib.pyplot as plt\\nfrom sklearn import metrics\\n\\nimport torch\\nimport torch.nn as nn\\nimport torch.nn.functional as F\\nimport torchvision.transforms as T\\nimport pytorch_lightning as pl\\nfrom torch.utils.data import SequentialSampler, RandomSampler\\n\\nimport optim\\nfrom data import NowcastingDataset\\nfrom loss import LogCoshLoss\\nfrom utils import visualize, radar2precipitation\";\n", + " var nbb_formatted_code = \"import gc\\nimport functools\\nfrom pathlib import Path\\nfrom concurrent.futures import ThreadPoolExecutor\\nfrom tqdm.notebook import tqdm\\n\\nimport cv2\\nimport numpy as np\\nimport pandas as pd\\nimport matplotlib.pyplot as plt\\nfrom sklearn import metrics\\n\\nimport torch\\nimport torch.nn as nn\\nimport torch.nn.functional as F\\nimport torchvision.transforms as T\\nimport pytorch_lightning as pl\\nfrom torch.utils.data import SequentialSampler, RandomSampler\\n\\nimport optim\\nfrom data import NowcastingDataset\\nfrom loss import LogCoshLoss\\nfrom utils import visualize, radar2precipitation\";\n", " var nbb_cells = Jupyter.notebook.get_cells();\n", " for (var i = 0; i < nbb_cells.length; ++i) {\n", " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", @@ -114,7 +85,6 @@ "source": [ "import gc\n", "import functools\n", - "import sys\n", "from pathlib import Path\n", "from concurrent.futures import ThreadPoolExecutor\n", "from tqdm.notebook import tqdm\n", @@ -123,14 +93,19 @@ "import numpy as np\n", "import pandas as pd\n", "import matplotlib.pyplot as plt\n", + "from sklearn import metrics\n", "\n", "import torch\n", "import torch.nn as nn\n", "import torch.nn.functional as F\n", + "import torchvision.transforms as T\n", "import pytorch_lightning as pl\n", + "from torch.utils.data import SequentialSampler, RandomSampler\n", "\n", + "import optim\n", "from data import NowcastingDataset\n", - "from loss import LogCoshLoss" + "from loss import LogCoshLoss\n", + "from utils import visualize, radar2precipitation" ] }, { @@ -144,8 +119,8 @@ "\n", " setTimeout(function() {\n", " var nbb_cell_id = 4;\n", - " var nbb_unformatted_code = \"PATH = Path(\\\"../input\\\")\\nDAMS = (6071, 6304, 7026, 7629, 7767, 8944, 11107)\\ndf = pd.read_csv(PATH / \\\"train_folds.csv\\\")\";\n", - " var nbb_formatted_code = \"PATH = Path(\\\"../input\\\")\\nDAMS = (6071, 6304, 7026, 7629, 7767, 8944, 11107)\\ndf = pd.read_csv(PATH / \\\"train_folds.csv\\\")\";\n", + " var nbb_unformatted_code = \"args = dict(\\n dams=(6071, 6304, 7026, 7629, 7767, 8944, 11107),\\n train_folds_csv=Path(\\\"../input/train_folds.csv\\\"),\\n train_data_path=Path(\\\"../input/train-128\\\"),\\n test_data_path=Path(\\\"../input/test-128\\\"),\\n num_workers=4,\\n gpus=1,\\n lr=1e-4,\\n max_epochs=50,\\n batch_size=64,\\n precision=16,\\n optimizer=\\\"adamw\\\",\\n scheduler=\\\"cosine\\\",\\n gradient_accumulation_steps=1,\\n)\";\n", + " var nbb_formatted_code = \"args = dict(\\n dams=(6071, 6304, 7026, 7629, 7767, 8944, 11107),\\n train_folds_csv=Path(\\\"../input/train_folds.csv\\\"),\\n train_data_path=Path(\\\"../input/train-128\\\"),\\n test_data_path=Path(\\\"../input/test-128\\\"),\\n num_workers=4,\\n gpus=1,\\n lr=1e-4,\\n max_epochs=50,\\n batch_size=64,\\n precision=16,\\n optimizer=\\\"adamw\\\",\\n scheduler=\\\"cosine\\\",\\n gradient_accumulation_steps=1,\\n)\";\n", " var nbb_cells = Jupyter.notebook.get_cells();\n", " for (var i = 0; i < nbb_cells.length; ++i) {\n", " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", @@ -167,9 +142,21 @@ } ], 
"source": [ - "PATH = Path(\"../input\")\n", - "DAMS = (6071, 6304, 7026, 7629, 7767, 8944, 11107)\n", - "df = pd.read_csv(PATH / \"train_folds.csv\")" + "args = dict(\n", + " dams=(6071, 6304, 7026, 7629, 7767, 8944, 11107),\n", + " train_folds_csv=Path(\"../input/train_folds.csv\"),\n", + " train_data_path=Path(\"../input/train-128\"),\n", + " test_data_path=Path(\"../input/test-128\"),\n", + " num_workers=4,\n", + " gpus=1,\n", + " lr=1e-4,\n", + " max_epochs=50,\n", + " batch_size=64,\n", + " precision=16,\n", + " optimizer=\"adamw\",\n", + " scheduler=\"cosine\",\n", + " gradient_accumulation_steps=1,\n", + ")" ] }, { @@ -179,69 +166,6 @@ "# 🔥 RainNet ⚡️" ] }, - { - "cell_type": "markdown", - "metadata": { - "heading_collapsed": true - }, - "source": [ - "## Utils" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "hidden": true - }, - "outputs": [ - { - "data": { - "application/javascript": [ - "\n", - " setTimeout(function() {\n", - " var nbb_cell_id = 5;\n", - " var nbb_unformatted_code = \"def visualize(x, y=None, test=False):\\n cmap = plt.cm.get_cmap(\\\"RdBu\\\")\\n cmap = cmap.reversed()\\n if test:\\n fig, axes = plt.subplots(1, 4, figsize=(10, 10))\\n for i, ax in enumerate(axes):\\n img = x[:, :, i]\\n ax.imshow(img, cmap=cmap)\\n else:\\n fig, axes = plt.subplots(1, 5, figsize=(10, 10))\\n for i, ax in enumerate(axes[:-1]):\\n img = x[:, :, i]\\n ax.imshow(img, cmap=cmap)\\n axes[-1].imshow(y[:, :, 0], cmap=cmap)\\n # plt.tight_layout()\\n plt.show()\";\n", - " var nbb_formatted_code = \"def visualize(x, y=None, test=False):\\n cmap = plt.cm.get_cmap(\\\"RdBu\\\")\\n cmap = cmap.reversed()\\n if test:\\n fig, axes = plt.subplots(1, 4, figsize=(10, 10))\\n for i, ax in enumerate(axes):\\n img = x[:, :, i]\\n ax.imshow(img, cmap=cmap)\\n else:\\n fig, axes = plt.subplots(1, 5, figsize=(10, 10))\\n for i, ax in enumerate(axes[:-1]):\\n img = x[:, :, i]\\n ax.imshow(img, cmap=cmap)\\n axes[-1].imshow(y[:, :, 0], cmap=cmap)\\n # plt.tight_layout()\\n plt.show()\";\n", - " var nbb_cells = Jupyter.notebook.get_cells();\n", - " for (var i = 0; i < nbb_cells.length; ++i) {\n", - " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", - " if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n", - " nbb_cells[i].set_text(nbb_formatted_code);\n", - " }\n", - " break;\n", - " }\n", - " }\n", - " }, 500);\n", - " " - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "def visualize(x, y=None, test=False):\n", - " cmap = plt.cm.get_cmap(\"RdBu\")\n", - " cmap = cmap.reversed()\n", - " if test:\n", - " fig, axes = plt.subplots(1, 4, figsize=(10, 10))\n", - " for i, ax in enumerate(axes):\n", - " img = x[:, :, i]\n", - " ax.imshow(img, cmap=cmap)\n", - " else:\n", - " fig, axes = plt.subplots(1, 5, figsize=(10, 10))\n", - " for i, ax in enumerate(axes[:-1]):\n", - " img = x[:, :, i]\n", - " ax.imshow(img, cmap=cmap)\n", - " axes[-1].imshow(y[:, :, 0], cmap=cmap)\n", - " # plt.tight_layout()\n", - " plt.show()" - ] - }, { "cell_type": "markdown", "metadata": { @@ -467,7 +391,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 5, "metadata": {}, "outputs": [ { @@ -475,9 +399,9 @@ "application/javascript": [ "\n", " setTimeout(function() {\n", - " var nbb_cell_id = 10;\n", - " var nbb_unformatted_code = \"class NowcastingDataModule(pl.LightningDataModule):\\n def __init__(self, df, fold, data_dir, batch_size, test=False, num_workers=4):\\n super().__init__()\\n self.df = 
df\\n self.fold = fold\\n self.data_dir = data_dir\\n self.batch_size = batch_size\\n self.test = test\\n self.num_workers = 4\\n\\n def setup(self, stage=\\\"train\\\"):\\n if stage == \\\"train\\\":\\n train_df = self.df[self.df.fold != self.fold]\\n val_df = self.df[self.df.fold == self.fold]\\n train_paths = [self.data_dir / \\\"train-128\\\" / fn for fn in train_df.filename.values]\\n val_paths = [self.data_dir / \\\"train-128\\\" / fn for fn in val_df.filename.values]\\n self.train_dataset = NowcastingDataset(train_paths)\\n self.val_dataset = NowcastingDataset(val_paths)\\n else:\\n test_paths = list((self.data_dir / \\\"test-128\\\").glob(\\\"*.npy\\\"))\\n self.test_dataset = NowcastingDataset(test_paths, test=True)\\n\\n def train_dataloader(self):\\n return torch.utils.data.DataLoader(\\n self.train_dataset,\\n batch_size=self.batch_size,\\n shuffle=True,\\n pin_memory=True,\\n num_workers=self.num_workers,\\n )\\n\\n def val_dataloader(self):\\n return torch.utils.data.DataLoader(\\n self.val_dataset,\\n batch_size=2 * self.batch_size,\\n pin_memory=True,\\n num_workers=self.num_workers,\\n )\\n\\n def test_dataloader(self):\\n return torch.utils.data.DataLoader(\\n self.test_dataset,\\n batch_size=2 * self.batch_size,\\n pin_memory=True,\\n num_workers=self.num_workers,\\n )\";\n", - " var nbb_formatted_code = \"class NowcastingDataModule(pl.LightningDataModule):\\n def __init__(self, df, fold, data_dir, batch_size, test=False, num_workers=4):\\n super().__init__()\\n self.df = df\\n self.fold = fold\\n self.data_dir = data_dir\\n self.batch_size = batch_size\\n self.test = test\\n self.num_workers = 4\\n\\n def setup(self, stage=\\\"train\\\"):\\n if stage == \\\"train\\\":\\n train_df = self.df[self.df.fold != self.fold]\\n val_df = self.df[self.df.fold == self.fold]\\n train_paths = [\\n self.data_dir / \\\"train-128\\\" / fn for fn in train_df.filename.values\\n ]\\n val_paths = [\\n self.data_dir / \\\"train-128\\\" / fn for fn in val_df.filename.values\\n ]\\n self.train_dataset = NowcastingDataset(train_paths)\\n self.val_dataset = NowcastingDataset(val_paths)\\n else:\\n test_paths = list((self.data_dir / \\\"test-128\\\").glob(\\\"*.npy\\\"))\\n self.test_dataset = NowcastingDataset(test_paths, test=True)\\n\\n def train_dataloader(self):\\n return torch.utils.data.DataLoader(\\n self.train_dataset,\\n batch_size=self.batch_size,\\n shuffle=True,\\n pin_memory=True,\\n num_workers=self.num_workers,\\n )\\n\\n def val_dataloader(self):\\n return torch.utils.data.DataLoader(\\n self.val_dataset,\\n batch_size=2 * self.batch_size,\\n pin_memory=True,\\n num_workers=self.num_workers,\\n )\\n\\n def test_dataloader(self):\\n return torch.utils.data.DataLoader(\\n self.test_dataset,\\n batch_size=2 * self.batch_size,\\n pin_memory=True,\\n num_workers=self.num_workers,\\n )\";\n", + " var nbb_cell_id = 5;\n", + " var nbb_unformatted_code = \"class NowcastingDataModule(pl.LightningDataModule):\\n def __init__(\\n self, train_df, val_df, batch_size=args[\\\"batch_size\\\"], num_workers=args[\\\"num_workers\\\"]\\n ):\\n super().__init__()\\n self.train_df = train_df\\n self.val_df = val_df\\n self.batch_size = batch_size\\n self.num_workers = num_workers\\n\\n def setup(self, stage=\\\"train\\\"):\\n if stage == \\\"train\\\":\\n train_paths = [\\n args[\\\"train_data_path\\\"] / fn for fn in self.train_df.filename.values\\n ]\\n val_paths = [args[\\\"train_data_path\\\"] / fn for fn in self.val_df.filename.values]\\n self.train_dataset = NowcastingDataset(train_paths)\\n 
self.val_dataset = NowcastingDataset(val_paths)\\n else:\\n test_paths = list(args[\\\"test_data_path\\\"].glob(\\\"*.npy\\\"))\\n self.test_dataset = NowcastingDataset(test_paths, test=True)\\n\\n def train_dataloader(self):\\n return torch.utils.data.DataLoader(\\n self.train_dataset,\\n batch_size=self.batch_size,\\n sampler=RandomSampler(self.train_dataset),\\n pin_memory=True,\\n num_workers=self.num_workers,\\n drop_last=True,\\n )\\n\\n def val_dataloader(self):\\n return torch.utils.data.DataLoader(\\n self.val_dataset,\\n batch_size=2 * self.batch_size,\\n sampler=SequentialSampler(self.val_dataset),\\n pin_memory=True,\\n num_workers=self.num_workers,\\n )\\n\\n def test_dataloader(self):\\n return torch.utils.data.DataLoader(\\n self.test_dataset,\\n batch_size=2 * self.batch_size,\\n sampler=SequentialSampler(self.test_dataset),\\n pin_memory=True,\\n num_workers=self.num_workers,\\n )\";\n", + " var nbb_formatted_code = \"class NowcastingDataModule(pl.LightningDataModule):\\n def __init__(\\n self,\\n train_df,\\n val_df,\\n batch_size=args[\\\"batch_size\\\"],\\n num_workers=args[\\\"num_workers\\\"],\\n ):\\n super().__init__()\\n self.train_df = train_df\\n self.val_df = val_df\\n self.batch_size = batch_size\\n self.num_workers = num_workers\\n\\n def setup(self, stage=\\\"train\\\"):\\n if stage == \\\"train\\\":\\n train_paths = [\\n args[\\\"train_data_path\\\"] / fn for fn in self.train_df.filename.values\\n ]\\n val_paths = [\\n args[\\\"train_data_path\\\"] / fn for fn in self.val_df.filename.values\\n ]\\n self.train_dataset = NowcastingDataset(train_paths)\\n self.val_dataset = NowcastingDataset(val_paths)\\n else:\\n test_paths = list(args[\\\"test_data_path\\\"].glob(\\\"*.npy\\\"))\\n self.test_dataset = NowcastingDataset(test_paths, test=True)\\n\\n def train_dataloader(self):\\n return torch.utils.data.DataLoader(\\n self.train_dataset,\\n batch_size=self.batch_size,\\n sampler=RandomSampler(self.train_dataset),\\n pin_memory=True,\\n num_workers=self.num_workers,\\n drop_last=True,\\n )\\n\\n def val_dataloader(self):\\n return torch.utils.data.DataLoader(\\n self.val_dataset,\\n batch_size=2 * self.batch_size,\\n sampler=SequentialSampler(self.val_dataset),\\n pin_memory=True,\\n num_workers=self.num_workers,\\n )\\n\\n def test_dataloader(self):\\n return torch.utils.data.DataLoader(\\n self.test_dataset,\\n batch_size=2 * self.batch_size,\\n sampler=SequentialSampler(self.test_dataset),\\n pin_memory=True,\\n num_workers=self.num_workers,\\n )\";\n", " var nbb_cells = Jupyter.notebook.get_cells();\n", " for (var i = 0; i < nbb_cells.length; ++i) {\n", " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", @@ -500,44 +424,42 @@ ], "source": [ "class NowcastingDataModule(pl.LightningDataModule):\n", - " def __init__(self, df, fold, data_dir, batch_size, test=False, num_workers=4):\n", + " def __init__(\n", + " self, train_df, val_df, batch_size=args[\"batch_size\"], num_workers=args[\"num_workers\"]\n", + " ):\n", " super().__init__()\n", - " self.df = df\n", - " self.fold = fold\n", - " self.data_dir = data_dir\n", + " self.train_df = train_df\n", + " self.val_df = val_df\n", " self.batch_size = batch_size\n", - " self.test = test\n", - " self.num_workers = 4\n", + " self.num_workers = num_workers\n", "\n", " def setup(self, stage=\"train\"):\n", " if stage == \"train\":\n", - " train_df = self.df[self.df.fold != self.fold]\n", - " val_df = self.df[self.df.fold == self.fold]\n", " train_paths = [\n", - " self.data_dir / \"train-128\" / fn for fn in 
train_df.filename.values\n", - " ]\n", - " val_paths = [\n", - " self.data_dir / \"train-128\" / fn for fn in val_df.filename.values\n", + " args[\"train_data_path\"] / fn for fn in self.train_df.filename.values\n", " ]\n", + " val_paths = [args[\"train_data_path\"] / fn for fn in self.val_df.filename.values]\n", " self.train_dataset = NowcastingDataset(train_paths)\n", " self.val_dataset = NowcastingDataset(val_paths)\n", " else:\n", - " test_paths = list((self.data_dir / \"test-128\").glob(\"*.npy\"))\n", + " test_paths = list(args[\"test_data_path\"].glob(\"*.npy\"))\n", " self.test_dataset = NowcastingDataset(test_paths, test=True)\n", "\n", " def train_dataloader(self):\n", " return torch.utils.data.DataLoader(\n", " self.train_dataset,\n", " batch_size=self.batch_size,\n", - " shuffle=True,\n", + " sampler=RandomSampler(self.train_dataset),\n", " pin_memory=True,\n", " num_workers=self.num_workers,\n", + " drop_last=True,\n", " )\n", "\n", " def val_dataloader(self):\n", " return torch.utils.data.DataLoader(\n", " self.val_dataset,\n", " batch_size=2 * self.batch_size,\n", + " sampler=SequentialSampler(self.val_dataset),\n", " pin_memory=True,\n", " num_workers=self.num_workers,\n", " )\n", @@ -546,6 +468,7 @@ " return torch.utils.data.DataLoader(\n", " self.test_dataset,\n", " batch_size=2 * self.batch_size,\n", + " sampler=SequentialSampler(self.test_dataset),\n", " pin_memory=True,\n", " num_workers=self.num_workers,\n", " )" @@ -553,29 +476,19 @@ }, { "cell_type": "code", - "execution_count": 12, - "metadata": {}, + "execution_count": 6, + "metadata": { + "scrolled": false + }, "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlYAAACDCAYAAABPyVWqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAB1V0lEQVR4nO29d3hcZ5X4/7l3etFIo1Hv3b3X2Il7mtMTElIgJASytF34AQsE2C9tl7awy9ITSkggvTqJHTu2495tucmyeu/SSJrR9Lnl94ds4SJ3yZKc+3keP9Zcje49c8+87z3veU8RVFVFQ0NDQ0NDQ0PjyhFHWgANDQ0NDQ0NjWsFzbDS0NDQ0NDQ0BgiNMNKQ0NDQ0NDQ2OI0AwrDQ0NDQ0NDY0hQjOsNDQ0NDQ0NDSGCM2w0tDQ0NDQ0NAYIobNsBIE4RZBEMoFQagSBOFbw3UdjeFH0+W1g6bLawtNn9cOmi6vHYThqGMlCIIOqABuBJqAfcBDqqqWDvnFNIYVTZfXDpoury00fV47aLq8thguj9VcoEpV1RpVVSPAy8Bdw3QtjeFF0+W1g6bLawtNn9cOmi6vIYbLsEoHGk953XTimMbYQ9PltYOmy2sLTZ/XDpouryH0w3ReYZBjp+05CoLwJPBk/wv9LMEcO0yiaFwMatDdpapq4iC/uqAuQdPnaEKN+FCl0GB6G1W6FI1mzHYboigQDUvIkoQUCoCqDMv1xiLn0SWMMn0OXE9nwBwTA0DY70eJhof1emOJsTI2dUYzRqsVRVYI+7wDY1JnNCMaTEjhAKoUHZZrjxXONzaHy7BqAjJPeZ0BtJwmlKo+AzwDIFoTVP24O4dJFI2LIXro2fpz/OqCugRNn6MJqfydc/1q1OlSNpoxOlMQFZnY9CJ6G48TdPeLFJs5AZ3JQm/dUZSP6CR+Hl3CKNTnAFYHsVkTCLhbCHQ2nvVrUW8A+MjpdSyNTcVoxmCNRfF2oir9hpXR7sRVMJMYl4O6PZuJ+HoG3m9xpZE7ZwEBX4SO8oOD6v1a4nxjc7i2AvcBhYIg5AqCYAQeBM47Q2iMWjRdXjuMOl2qiowsRZDDQRQpctrvBFGHIGoVYc7DqNPnSVRFRg6HzvseQdRdJWnGBKNOl6oinzUmFSmCIkUQROEs/QmiDr1Bh053LgfrR4dh8VipqioJgvAlYB2gA/6qquqx4biWxvCi6fLaYbTpMiY1n/z5c3DEW2mp6cbf00fU7xn4fW99yUiJNiYYbfo8iTkumfwFizBZ9FTu3Dvoe/o9VR8tb9X5GG26jEnNJ2vGTEL+CPV7NiCF/ADYk3MxO+JoPLSPsLfrtL8JdDZy5N1XRkLcUcdwbQWiquoaYM1wnV/j6jEadGlyJGC0xdLXWj2SYox5RoMuAayJmUxZcT1SVObohwcIuJvPmqg1Lsxo0eepKFKE9poGvE0Vp20VaZyf0aJLndFMYsFkQv4IzYd3DxhVAJ7GUm3BcxEMm2GloTFUONKLKFo4h8pdxSMtisYQIYeDHN2wHX9nw0cuzuZaJ+Lroatsz0iLoXGZyJEQtTveBRiIrTrJma81BkczrDRGPdGQjyNr1mir32uIsLdL81BpaIxSNAPqytAMK41Rz8ksMQ0NDQ0NjdGOlnKjoaGhoaGhoTFEaIaVhoaGhoaGhsYQoRlWGhoaGhoaGhpDhGZYaWhoaGhoaGgMEZphpaGhoaGhoaExRGiGlYaGhoaGhobGEKEZVhoaGhoaGhoaQ4RmWGloaGhoaGiMCDqjGXtyDnqzbaRFGTK0AqGDYHIkkD3nBsJBib6OZnrrjmptN64BdEYzMakFxGflI+oF9AYd1dvXEw14R1o0jSvA4krDlT8DnV4k7OvDXVWs6fQaIS57MrbETPydjYQ8nSh
ShGjAo1UGvwYw2p24Cmby+Ufncvv4JP77wype//3fr4mxqxlWp5AydQlPfGIe31yYga58K578ZbxxvJPv/QK6q7Q+dWMVQRTJXXgHq76zlFy9j4jFSbk7xI/WllOzUzfS4mlcJjqjmRWf/iS/vGcSDqMOb0TmeKefH72SzLF1q5AjoZEWUeMyEEQRe3IueXPn8LW7JzEjNYYGTwirQUdajJFDbT7+7Wdr6SzbrS14xxB6sw2LMwWzMxlXRiqL5mZw15RUbmhYTfU33+DPX/48bQ/fz7Z/vHxa4+exiLYVeAKj3cmWX97D17teQzz0Pq1ZN/CN1eX8Y2stOdPGYbQ7R1pEjcske8HtHPn+POL+/h8IIS+b6zxsb+ghI95C1pzFmBwJIy2ixiUiiCL3fenTvH2zmfy+48QZFOLNOqal2Nn21CJ+/ZtvMutjD2OOSx5pUTUukaSJC9n85y+x87NZ3D0unrCsEpYUZqVYSd7yNDceeoaDv3uAxY89qo3dMYKoN3DTpx+h5cUnqfndXbz+tRuQFZWXDjQhLXiQhKn5lP/P71n1+HQ+8ZVPE5s5YaRFviI0j9UJ5EiQO3++hRjndfQdDNJZ+zd0RjN///5dLEyEJ3Pjee+FtfS1Vo+0qBqXSLC3hwfeqKObe2j8/lF66kv41lOf4H+z6mHhRH5TOYlf/XkH7SVbtS2GMYKqKGzfXMXNngwKk+0cLN9NOCixYn4mP5ku8InOrTz22FK+PfURXnm9mM6y3ZoHa4zQ23Ccz/51L3dcl01YqufdrbW0VdXy0MM38P1bvkjkz9/Fuv4P/Pmhz3F/T4CDb72ijdtRjqrIHNnfzL86TADsPdRKW1UNot7AqwUJfOzzP6Xo4Lvg6yQ93ooxZmw7MgRVVUdaBkRrgqofd+eIXNvkSMCekoO/o5GwtxNB1CGI/dtD6TNX8O1Pz+aOcS50gkCbP8rzB5p5Z2MVDfu2EPH1jIjMw0H00LMHVFWdPRTnGkl9DsigN+BIH4ciRfB3NqBIEQRRh9Eejy0pk+98+VY+OyUO4ehGdKm5dLvGs7aqm5/84yA121aP6S0GqfwdlECXMBTnGg26PImoN5A8eRGCqKOl+AMEUTxx3IgxJp5F99/Gz+6aSIHYg1q2C50rBSl3LlUeia+8cZTdr7w15sbsUOoSRpc+od/zKIi6QcebqDcgiDpSpi7l4XsnszA3njlpdpy9NSj2RNpUOzsbPXzx238bNQtendF8XgP+Wh2bgigOGLcGqwNHRhG9dSWD3gtBFDHa45m28hYevCGXdIcZgyiQ47TQ4YtQ3RNgS3kne3Y30lF+gFBP21UxnE/9DBfD+XT5kfVYCaJI/uI7WffdZegEeLa4hd1Vbm6eksKMVAetvjB3ZOjQtZQSfv8FKl7ZjLMgmR/eew/f/9EK/nx4Ct/9/t8Iupu11dIoIy57Mp/9l1v54vxMPGGF9ys72V3lxmLU8fj8bKYlW4n88Snq/96Mp76HxMnpJM6ZxMOzljHrG4v55rhENr/4FmFv10h/lI8kOqP5xMM2gqrIA+MrNnMiD9w7Dbcvwj6XnczceALBKN6eIGargacfmEJC+Xpkdxv6pHQEUYe+u4G02Gw+tSCbygMzaT20cYQ/3UeX+IKZ2BPS8HW1EOnrxt/ZQNHyu7lneQF/+N279LVWoTfbiUnLJy41nWBfEFWRuXPlOL65KAdzoBMkFdmZgVc1Eg0ruKwGHnj8NtZ+UElH6Y6zAp/NccnIkeBVCYgW9QZSpy/HXX2QoLtl2K83WogvmMmilXNpauvj6Np1JI6fx9Nfu4HfbZnJsUOtNBd/iMEWS9rkOUhRBXf1UVZ8/BZ+dscEUq0iEVWkMyBh1gs4jBay48wsynbSdX0u5V2zeHZzNeU7DuJpLB2YCy7VCLoYMueuxNNcjafx+BWf6yNjWJ25ktAZLUyYlEyDJ8zsOImv6/aim5+EmuhC6DlCpOYIpd9bQ3dVN91tfmY9Mp3U229FyJyATxaRVRWd3kj2gtsJ9vbQUbpjyBVtciRgjk0E+rcqfe11Q3r+sY49OYepNy3G7w3TcOQ4fa1VSCE/BpuDPdVuPj4tjbwPf8XnYuL415tvRdUb0fnrCb2/geLnd1Ld2MfsxVn9ep20mE5dLG8daKa6vD/76FycDK6N+D1XbHwJooioN2rbVCeIL5jJT795J6kxJl4pbmb3vibqdq1HCvnxNpfz9+d1/Pb/3cszC/q9ynJsGkI0SNCSgDnaR/eWD+lraCfzjhXIExbTGhI51ujljQPNSJHgBSfk4ZiwR/I6o4X4gpnse/oxUgONtFhu4s/7m/jVT59Disi8tbGKkKeTpIkL+c4XFvHApCRsgQ781iSskh8hGkTe/De6Dh4hbkI+xjk3E7TlcqTdx+82V6MTBBLSY+lrTTnNgBL1BtKmzCcm3kLp+jUIom5YPZaqIgPgzJ78kTGsjHYnP/3mnTySKVEvjOehsEzZpg187S9m7liSR2GKnV1ZsXzttgmszDbjx8TxrkXMsfXhf+PHuNvcuBZeR+aMm/Eb4ugMSPgjCp6whE6AcQk2vnrreF5KsLH6zw1EA14EUSRt5s0AtBSvG7JxFPR2kzNzGsc6G644eP6aN6yMdid/+/3XuDXfyRfeKmX1S+vxNlcghfx88PwbBCN380bWYTp37cNZmEk0sJG6dQexOM04C5JJnpmHIycF07gZKAXz8BlicPsltpZ1Eu7rxtNcjxwJXpQsMan5ONKL6CzbPegA1xnNmGITifo9CKKOzFnX88ht49lX0403GGX/W2sI9baf9xp6s23MZ1RcDOa4ZHY99xV8n7mX8Y/eSOCrX+KeP+xh/+uv4K7Yx+6G48h3T8Z9rJZgRy8xh47hPt6EsygNvdlE2qxUCu+cQlx+OvrkLDp1sbxe2sGfXz58UYPV6kojNnM8zfvfv6iBfXLbSlUUDFYHVlc6fa1VGO3xJBTNpvXQRs24Ah57bBl37vsdxhgr193zDT6YnMIP9CLV29YRk1pA1pQi7sixEF67llB7FyG3BwBbeiJyYhJGh5XEGYWosowu1Ic/GkNXIELZ0XbcFfvOqyud0cz0O++lrydI5aZVVzRhnwy+HWz1qzfbyJi9nL6OFtwV+y/7GmMJa3wqqX3VqF1NJE7MIdNpweJMoX7PBlKmLmHBA3fxg9smMPXw3xE6dHiqqgm6PQQMeowOG5bEuIFzCVIIq0HAF5Eo3XGM3roSVEU+azGkSFGaDm4lNmsCWXOWsmRhNm+9uv2yM7wNVgf25Fx6648O+t1QFYWO0p2YYxM/MoZzfN40Cl02jkREtjd08Zlbi3gl3kLdsQ5SY81YDTq+uTgX/as/xrs+gL2oiDmuFKL1ZaiKQsQbQPG40YV99KoxvHmsnbCkICsqHx5q4a75WbT2BjlU3IIU8gFgdqYQcLfgyskle8HtNO3fOCTPvJ6awyRmpZ3QcckVneuaM6wMVsfAysRgdfDTn30J16P3sKHVx9N/+zLffGIlf/1VN2FvF4oUISIrGCfOxdHaROVbu+iu7MaWZCPn5h
lYMjIxFs0glDmTPllldaWb53YWs6AwAbtZjzk2kZ7awxctWzToY86CbFiQzdrn3yLU247JkcDkm27CYNJTkO7grqmphKV+i31KUgyiCMV1PRh0IjFp+ec0rES9AVfRHGKTk+hubLgmy0PEpOajM1rwNJaSNmU+WXuf55AvSqS7F2dPFX/61Gxuq+lh/PRU/vTgVFwHXqOqoZOIP0rzvoNYXVYSJhtwzp5Bws0rIbWQXns6Gxu8/PWVI+x4a/1FeQVVRaGvvY7cOfPxphbgba446z0mRwL5C5aQlhVHKCozK9dJtsvGuqNtZMRbmJnt5L//fpD24/sJdLeiM1o+coaVqDdgdqagRCOEvZ0A6EQBx+2PgqoQFWFioo0Hby4i56HpfFxfjuJxU/nFT2KMsWJLdSGFIiTOn4EuxokuMR3jjGUollhaRCf7W/po9rrZWtZJR8Xhc8bNnSpHQoKNxdPT+N2++LO8kaLeQGzmRGKSMwn0dmKyxxMJeEjOy0AUBCq2bUYK+ZBCfmJS88gal8C+N5rP2oZSFZlYl5W03Ensrjm3XNcSwd52Xu+Zz4rCAuxyhE9OimflX57gYKuPW62tSDVHUasqqHtnEzqzkaDbh8Vlx5bqImbKVPTj52OwJ9AnWqnxS+yvcPP8znpCPe3nfKjqjGam334bBpOe+uOdJDrMJObmnHNuNDkSsCVlEexpQw4H0ZksRP0e5EgQVVGQQj6SCvIJeTvP6ZGK+HrGXBzfldBZtpsfrsnn9w9M5fPjjITefZon7voY7Z+cQUpb/31WKo9SvesY/g4f8trDWBMsFH3xcUzXjUMXn09LQKLdH6WmrZcX3i+npeQwmdNm0HK8jO+9/daA0WxLzEJntHDz/UtZUJDAz/+8m6wJKdy05DP84+k3CXQ2XtFnkUJ+anZvG4rbcm0ZVnqzja9891+4LtvJU3/bj6qofHZKHPXzM5mzcAq6KYv4iimdiPQwrz23lqKFc1i30sSex75C/h0zKbznOnRx8ZimLMCfPBkx6iOqN9IZlCjvCvD0+koOvPk6h5NzueeRFdz20I288cfOix5Iod52Vv/5ReLzpqFIEWJS83ng8dv44c2FCPQ/VMQT/+uV/tWXR9aTEW9h3aZakvMycFcYzpqIE8bPw2iNJSXXRSQsY7lGUsxFvQFbYhYhTydZc5by48/MBeDL/72eSFhCXv4EszPyQYoiyBHSYwz8+euLuN4lE1n9S469so3K3S1MXJ7DdT94GGPBVBSPG31KFlFXLmGjg6aeML/5sIpdL7x0SYZN1O/BYNKRMWUqpWcYVnqzjTn33cFLn5pJnBghJJjwRxXMepHPTnKg622iz5VI3wNTeHOng32vvviRWN1C//Zt3tx5NB2vxJ6QxsfvnEAwIvHiP7bQXVXMh8XNzM2axEqXH+WdX1E0fhZPZTuQPZXIPV7C1aXIEQVHbiqxkyehT81BiHGCIIIgIlti6dTH8/KhVn79zIeEPV0Ee9rOu6LVGS2MX3Q9VoeZI7tr6MxPwmiLPc2w6vdAyEy4fjp/+9RMvGGFdl+YSneAQpeVeIuBZ6elUFzRRckHG+gs20NKzs04MorO8krJkRD1h0pwpOYg6o3XnGE1mLdGCvp5eV8jdxc6CP7jp8ihCIGKRmamughkJiFHJQwxdhy5qfhb3VhcduRQBJ3ZiDFnAh32LDoCEnuauvjvvx+k5dA2pJDvvPdO1Bu5fnIK/zIvk19uqeEPv3uXgLt5UHltiVl88l/u5is35PJOeScbStqZnRfPhkMtNJS20HpoI6qiEOyLkDZ5DtVbVg35fRtrGO1OEorm8N/3TCb98Osoikzlm7swrDtA/n2LCYs6ug6WojebsCTFEewJEpMRR+bKJahTllPhF3lhWz1Wo47VexupPVQ+EPBeekYygs5oZvrKpXx5eQGTk+xkCh6u+8EtPPrr7ez0hDCY7UPymS60I3SxjFnDStQbgH53b1z2ZG66bxFxVgP/77oEOLKB5T+4mxt+spVPvl7Jf/7gr+hjDHiiCpGgzG2TUmi6YzE/vG0CUtV7mJ0WRJ2IZfYSpNy5VPtkVu1v5u9ryolPtpPktNDtj1B7pAY5EiLc182h8k6kqDKwr34mOqP5ROCtfNokI+qNFMzKR5hTwE1TUnhkWipOTx2KOQbFEosgSyCDztuGajBhiUnniXnZRCSFsmYvE266m6rtm06b9JPzsvj4igJe31RD09FSfO21w3vzhxFBFLG40gH41lOf4P+bEce7jVGyYs3MMPUi21y8e8tkdu9rorI7jJIwn6xYI87GvZiDbq6PF4muf462vcdRZJWJy3PIXDoN48xltDvy8ScriEB3n0SDp4c3ipsp3Xb4kowqQRSJBrwcefcNYlILSJm6ZKBUg95sY9rtd/Hkojzig63QWIo9MRNzXAai141qsqN2NWHXm5iZlsDrioog6lAVBVFvQGe0XBOVh08iiCKxmRMHvpN3fPI2nrk9h+rgfH6xqZpOb4i+kETU72HmfQ+x5ssL2FrvYY3bxsrF9xPZ8Tb+hmYaPjxKb70Hi9NMysxM4hcvR+dMQrbGE4rLpCso4Q5IxKGnqsPP394qveDW30miAS9lW7dz+yfv4LoHZrKzvJNJyxZSvKqbiK8HQRRJnrwIKRIkO9lOslVPutRKUUYyc9PsyCp0BiT+9fpcDuYn8O+lDbSXbKPuSBUFsycMmh3VW19yWjDuWMdgdZA1Zyn2OAt6g0jJBx8Q9nYhiCITbrqXTd9Zgt3bhAw4bn6A0K7V+Jo7KX+jGIPNQNKUFFLmTiB+/lziAaQousR0pInLKPdEeXVfE0caekmNM+Pv7rqoxWw04OXZv6wnGFnKV27IZWLa/fz9wyqOb9k24N04WTx40cIsHpmVQZavikemFDEx0U4gKrMg28k/0hy8WroDORKi9Uj/1uJHBb3Zhi0xC197LXIkhN5sQ9QbsbrS+c6/380XCiC68680bdqDKiukzM4jNj+dls0HcBZlEj8+G8FkxpBZREZyFrLNRZsugd01Hl7Z38S2d3cgGoxE+rrPu1sgR0Ic33GUP5v06ESBxxdksyLXxLqnlvDIc8V0OZMHZBwNjEnDypFeRM7MGcyZksyx+l7WPzEBsa4YJa/foyFMXoqgSHx8eT67q9w88vvdqIrK/z0+m9lOBVxW3loQ4MMVyymdmMCEJ+5En5KFYHXQ5JOp6AqQGWshKTOWwx/sotaVRtuRTQOTYNjbScn7757YvglijktGkSLIkeCAYpc+9gmee2QaR9sDfOPvxZRv2UjY20XE101zdTdffHAq89Lj8IRl4l156FAIyAI2UUHX04QY8SOLOkxRH/lxdr53YyHNfRECUZn/cFk5umH7QIpx9c6tbEiNobvdN2Yna0d6EcvuW0ZhSgxzs+IIRhXukw/R9fs13HPfw/hdM+mWTBxp8bP/UCvfeHg6E+IEfrWvE6fVwN3b3iYmPwdjwVQO/34N7soeUqYnk3PzLKzLPoZqjkEAjnX4eftwCzUtXvLSHKx76X187XUDad9WVzrJ46fh7
/HgripGb7ET6ese0Ks5Lpnv//AzVLb18c7b+3FlpPLth6bzrd8YaSn+AKsrnc/eVMSkJDs+uxNbvp2oyYbO14kghRG9HeBMQlVk8uLM6PQipthEgu4WbIlZpE6cStWW964JL4aoN1C49A4mT0rm4IF8wsEI31xegL6nhoKYZL66JJ+9TR6+/9vNRIM+fvzx6Vj3vs6t02/itXqJ6J7V9JRU4Jo3mwlZ6Xir6tGbjcRevwylYD6diom+iEJpTS89wSh13QEO1vUQjMp0N1RjciQOrECNdiei3ogU8mGwxaJEI0R83QNjJT53Kp5AlG5fmJ/dO5ncOBOfc1p47/d/AiAhK4nP3zEBp8VAY1+UWHMycXIEq7cdxeZCZ7OhAJOT7Sy+aQpvVOyjp+YwrmXTiEktGDRmYyyO08EQRJE7n3yEX909EV9Eodwd4A8JNsqOtOHv7mDx3AzMm/6Kt74eW9E4Aq2N+Nu6SVl2PakrbwJA50oFZxpybApe1UgwqtIbllmzu4mwpPDaugq6m1tQFRl/RyOi/mzP/bmYne1EJwo8MTmWJyZM43eLcvnO13+JHAlhsMbytU9MZ15GHK8cbsE8LQ/RL5HuMPHj9ZV8en42n5qbReXd97H/1ReIBrx0le0Zzts5qkiZuoQXn1pKgyfIpopOvrAwl8JYkSqvgiiA0FUMooioE9n14mHm3D+JlE88Qd6spf1Oh9gUhLAX2ZFKuRxLR1+UZm8vv1tbTsAb7g/bOZEheiH6Wqo5sjsWmzOG6vGJzEt3kFC+nvcnuml+5OPc9z8JlK59HVFvICa1gMLrZlK2ZceIJH2NOcPKVTSbL3/uRvLirUxItDNx8mF2rryXKU8swZ6QhZSQR31flJaeMNNTHUxPdfD+8XY27W1iYqKF3cuWU3DbFAJZyaTNSsFgM6GfuYL1fU52lHXjCdRS2uxlVq4Ti0FH6oSJ1O7eclqaZ9rMm7lxRQG3Tkrhf9eWs2RKCjFmPZtKO/B6Q5Ru2k7FkTZ0n5jOImMLL37pOu7yhqje0h8QW7f9HX7a0cLsZTPo7PTx9bsmcUNWLL0hCcVixGFzIfh7EGU3AObuOow2F3ZXKpXdYawWA7c/cstAX6VQbzs7/v7iqLHWL0R8wUy8TeUD2zOi3sDkZdfxu/sm0eCJUNsb5N50maafvoTRYSVSdQSrxUGrKZvSTh+u1BieXldBVCnk4amppAUb2fHqXvwdm5n2+DymfnYFOmcSosWGrnAW7fZs3EGZsgYPBp3ItKw42npDNLoDCKIOc1wy+QsWAXDHoly+OD+ToKTw4w1T+OScTH61qZpNr61D1BsJuJv5+9oKnv6XeUzJiOO7P3ie73rD9J4IUvZ3NvDs5mpy7p7c/1ktdoQIuKxOFEcqem8rNJQgGM2kmGN45pOzeMQb5tia1wn2tOFpT7tmtodSpi7lWw9NZ9WhFsLBCJYYEwUGP9EjBzBkT6AwqYCUcS740hJe3JPHy8VNvGeeweckK/dnB/BsrsPX3IULMM1aTsIcPXJsGh7BTLk7RIe/jwZPkJoOH5PTYznS0MvSiUm09gZJvOcGWt0B9r3xFtGAl4UP3ssnrsvi6fWV3H99DgC/+tsBmve/D4CnsYyMFfnkJdl57XALvpBEc6sXVVEQRBFFUpiYaCfBaqAvLFPVHaTRE2RachLpihGnKGBCwawTmJnjZI0zBV97HdtXbSZ6Iuh2rONIL6Kvteosg9DsTOFHt47DefwD4iIhMictZsr9U/DcOZGDrV6e2VjFtwoW8S8fy6ZA34fF78ZYXwpSFH3OJGSbix6TC1kBWVJp9ISJKgrrK7uIs/bvTNy+NJ8M52Tq3X4O1vdSdbCJ1kMbzpIlLnsyciR4Wl2rwngbGZFWug3pNPXBewdbBgLdLc4UcuIsWPX95Yj+7eVDPHJ9Druq3dS39fHlP+3htsV5fP7mIr6wNuGaKsEiiCIJ4+ejKvKgxqLJkcA7319BwdHXmOJxc0/eZMRgGMmaR4cP9jb1UpAwjnFzZjLu+vu57+b3CDXVoxqsyPE5IOppiRqxO0T8UYVwWKauJ4BOFPjuPZM50uqlb1EuEUnm789vvmBcsM5kYd6iQrr9EfLjrbgCzYiJaeiSMnAHo4SD/XOmLTGLxz57K7eOT+Y7ikrxmw1XfREzpgwro93J3R9bwG3jkvCGJawGke61bwEQ7QsgHd8FC7MISyphqX+LzqTXcfvEFBYXJGDRCWTdkE/V6qNMedzJ+G99FSU5nwbBxU/e3U/x26sw2GKxutIo+dCPaDAiiCIRX/eADJlzV/Lw3RP57mwHnud/zq2f+z76niYixet4/JHPIAoC95n07HnlNRb+h47bl+dj1DfT3VB32mfprirGPX0cXc19PPXHPfx/n5hBkt1IkcuGOc6J2eaEnlZEkxXFHIsQDQPgCUs01fVQE5QGCpkCY8aoMlgdZEwo5HhL1cCxxPHzuWdeJp0BidreIKl2E5F1f6Jy9XHybioERUaM+HHF6ch3WgE4/N4q/rttOYVfuR7iMpm/7m26fv0fNG6rQApFyHziZnyp0/CEZep6Qrx+uIXC5BjmZsQiqypGvcjR7SUE3M2YYhNJzYilMMXOynFJiAJkBBr49e0F6Hqb+f3HJvNsXjxbyzrZ/uq7lK59gwfaPMS47AR72mg4ZUWkN9uJc5jpi0hEFQV3UEel20+sSc+ERJHs2FRs9iYEgxEFSLPriUvs7+ouhfzDUrbjamK0O8masxh3XS19rdV87xkjnuZqAu5mnvnt1+n6w4/wVDeTfXsvhvkujHEOJifbuXdWBqUtHjbsbGBvWSdmm5F545/kPz6Xg75qJ6qnE+KSCeuttHgitPrC6ASBeIsBn8NMX1iiqztAfZcfWVFx2Y14AtGBMeL3hZmV5uBbd0zEatARVRQyxyXSeqjf89HXWs0/fvsiqVMXEPJ6kSJBsiblkThxAd1VxcQl2rAadASiCsWtXurcAd54v4J58zL49opC7EYRRRDZ09zD39dWEPL0B+MPltgw1ji5mPzHt5fxnbdKKH537WmxKEs/fjtxL/2AQ2sOEpcXT+btvaROXYTdmUtYsvOJRbm8sK2OW94+RMGsXBRJwWrJ5lf3TSbFZsAoKCBBVFHxhGUCUZm+iERbb5AYs56UGBMum5F5GXGU2Yxku2z83/HOc46TmNT8E0WBo3iby/nh+2X89aFpBKMKe5o89LT7cKSPG8jYjDX3by9NSXXQ2hukoy9Mc3cQk8VA46F9PHOkmOTx085bgmWsIeoN3PiZx/jrQ1P51AuH2XCGYSXqDXz9u09g+N7jHG3ykDI7l+TUXOSsKQT0NrLjZPoidv7j+WJMFgO/+/RsLFM/jn22iFsFUQadIhCSFJq8QbqDUXKdVgpdNgJRmXSHiWZviJQYE4oKaeML6K07et4FZdDdwgcvrSZj6mz+b2MVtzw6HkEKU2/J4at/3UdLyb7+9/W0kRFvZXyChVvmZtLXcxdSRKHxwKarFmYxZgwre3IOk1cs4u6pqcSZdcSadCSZZHyiyMx/uxV9ag6I
OogEiDPb8Ib1rCnrYPXWWno7vERDPormFPG1J37Bys/XEak6AnEpeM2J/HFLLce37CEa8PZ7gHraSJm6FACD2URPjW5g5epMtmM16ni/TeD2lR+j7+WfY504A/OkeeiECDV+He0NvUghP9VbVvGbHYYTWxCnB8+qikLlrmK+8bV7cFmN7Kvrpi8k8Z0bi9CrEoh6BHssqs6I7EhGDHqQFZVAVCY2wcr+118bM8bUqYh6I03HKzHYYrE4U9AZLTz9nZVMTLTS6AnjDkQoctkI9/pIn5uKsygTMSaOaFMVcQYLefE5rJyRRnvDjdRsW83dnz+I1ZWGFPSz5K7P8OKzNgIbXkYVdaiqSqsvQrM3TEa8lVd21LEvzcFNE5Pp9Yboa6lGjoSI9HXT0dpHdoKV6p4A+1s8PDg5h5hogBI1hYMVbpLsJrzeENGAB1VRBip/Dza5z8hxMjXZji+iYDWIhCWZkKRgN4oYBQXV7kIxmEBnxCTIXFeYwG6jBSnkH9NGlatoNn/5wX3kOy188bV4PF0Bjq5+A0WK4sydxoPWeg5sK0cKRYkrr8WVfARbqocs12S6AhZ0Iuw91s7Bd95BCvnYLur4gyudjz9xJz9bOQ5z0I3uRJ1jnSAgqyoGUcTtC1OQFMPtczN54f1y2sqOE5uej6+rhWigvxzDgTdfZ2VFBxNnZ9HTHaChtI7uquLTJvKwt4u67e8A/Z6Pz90yjtlPzOWh/00hO9lOglVPUFK4LjOO67OcTEl1oNcJ6EUBoyigjwbYUtFJ+YdrrpmSJzqjmYWffJg3H5+FpbeePzw8g2V7jg0YVoIo8sXFeTR+q4Seml7SFxSgRkIIER/WaB8ZDjsWgxOrQceP6nvZ9cJLA10Q7ipfyafvmcS9E5Ox6AVEAboCUZr7QrisRm6dlMIHx9upFwXsZgPF9T3sOdJGd0sv7nN4N86MXZMjIfa8u4mHozI3TUnh/skpPPTUYrqD1/PI07vpbvdjN+qwG0Vmp8UwLqH/wb8oN55Ys4GfxJhY9cfnr6lgdb3Zxs9/+VXuHJfI/X/eT/G7a0/7vag3kDbzJr5lKmb1qnJSJyagN5v6k370JixEyTAr2NIdfPuh6Ty7o47t9T0k2ozMy4hFBPqiKu2+MGa9iNNiIMFqpLUvTFcggkkvUukOUFzfQ1N3kJunpBDsC1+Ulz7Q2UjlpmbC82+jPmIiy5VDZtjPG5+bR/snZ7G9voc/rynD7YtgN+q4b3IKC7Kd6ASBX2xIZPPzL12VsTkmDCtrYia/+NFj3Jgfj1nfP4kd7woRkfXk3vUJkCMIqorscUO4D53JhqKq7KrsomLz+wM30l2xj0feSyRt8hwevG0xMXV63j1wgNKtB09bWQqiDmuslc/cMwmzXsd/dnfQW1eCq2Amgghv76jDuDiPnenjmPnwTOp8UVYd72CxUeWFA3U0H9k7cC5Fip7zCxNwN1PZ1sf9KwqYnxlLszdMbqwBnacZ5AiIelSdATHUh6BImNUwMUYdeoPuNG/VWKI/oFXHx568n23b67HYjcxPj6EnJFPpDvDffysmJSeOD4rS8LcfoPNgJQabGVP+RBBEFBU6vCF6mptQpCiBzsaBQNS3f13CnKr7+dydTzDbEEuoM8hvt9bgC0lEJIWG0hb2v/4aR1bcQUdd84BbPy5nMkaLnnUbqqnvClBb3klZaw6FKTH85Z3j1O3djsFiH1gFn+RcRpCsqMSbdaRaIKjqmJxkw2US+vXo9aEa+vtlqYKALOj52JRUXpu9fOChPqoQhNNqo51MyjjzO22wOvj0Y0s51t7Hl/53OzmTkrDYjQPJHTqTGUGnw13ZjacvwvgH7ehcKcgxyQQlhba+MO8faaO1snkgvV1VFHztdfz1p79l39H7+fLdE3FZvYQlhbCkEJJkfBGZTXub2G7S0VbdRtuRzcDZ9aPkSIiW4g9oucgqJL31JXzrZ0Yee2wZj98xgUSbkUSTiqj6UGwx9EZFJifbcZp12A0iogCK0cqn5maxaeYy6ne+e2X3fQQ42QbK21w+oN+4nMl899bxtPmjPPFiK+7WioEsR6PdSeL4+cRbDLQWt1HmDpLX0Utiag5SfA5+fQxufxR3IEpUVskodNF6xE7Y24WqKDQf3s1r8RZSYkwk2YyY9DoCUZljrX00dQdoaPdxaPU6gIHq6RcTW3XmuOxrrWbHc7V4b7ufHKeV7mCU2ekOfvWJWTT1hciIMWJUI8TEGOkNK3jCIkadgEkn8JPbx3O87FZK174+DHf86nKyGHFC0Rzy4q3c+YutA59LEEVSpi4lb1omDy/IZmFWHNuWL6c9LJF64u9VWUY12XGHVSKySG1PgJa+ENkJVjaUtJMSZ2Zcgo14s56+sEQgKuMJS3hCEtU9Af701jGMFgM6nYgUlWmrqsXXVsfUbz3OrUvzeCd8Ow2737vg51CV/urtf9xVwFNL84ilf87NiDHw6SlObi5YSKc/SlBSSLTqUVSIygo/un0CnyhfcVWM5DFhWMVlTmBOeiwRWaUvIlPXG+LHq45x1/wsvpYeRupoQupsxpBV1L+/q8IrB5spfm/DadapIkUJuluo3rKKn+5ah95sH3TPPL5gJvfcWIhR11/Ucfx1E+kbl8vdN+Sw8Vg7iqRQ3+Xn7T2NfGFFIR+Wd7BubSnPuRw0HdlPqKftvJ/H4krDlT8Do0mPpKi09UVJsOqZlx6DEPKCLKHqTaiCpT9TUFVRBXHg71Xlwv0ddUYzcTmT8TQcH5HVs95sG3Cdn+mxs6fkUFrtpq1kF3Puu4Nf7Wzgw0Mt1JfU01G6g4zCT9K59zB9rX2kzsvDMnU+pOQTiMtmb2knr79z/JyFFY+teY2vbnYQnzetP8Mu5KOvpRpbUiYBdwum2ERkSSFzQjbepgqiAQ9phTlIUZn2km14Gvsr3W8x6ljV2jmwNXexabimmHgAqnrCJNkMWPQqLqOKrq8DIdQHgoiqN6FYYlFNdoKSgl4UmDs3g8a9g/cZG8lig6aYONKmLx2Y8JInL8KREIPJYsDXG6R+T/8Ys7rS2VXZxdENu/E0Hqej1Dmgf73Zhj0hGamrFVEnkjczBUtGJqrNScSejOKXqOsOsO2tjYP2fFMVhaOrX+Mr+3MpWjiPR5bmA+AJRnljcw2VWzcMJI4YrP26lyJBbPEpRENBuir2XdYWQHdVMb/9aS2PfeVR7l+aj0cSsJidBCUFX0Qm1qQjQR9FiIRRzDH0RiAtxshj907iv4o/HFNeK4PVQeGim7jl+hze21RIxcb+EAtnegZvHWlhX1knJR9sGBgHjvQivv3N+/nSRAs6bw3FExOIFrfR19yL0tuJKIVRdP0p8F2BKKuPtHJozabTwipCve2Url/Hmmwnc/LikRWV4roe1r+2kWBPG4Kow5kzmdjkBDqqKimYN42HF+fhC0u8uLaC+gO7LrrCuaoolG36gH+vbSHY044zexw/+uxcFmbFYpICqHojkRNDTFbVfi+WQUSQI/z347O5Z/vGMVefKiY1f6DUiN5s44vf/hdefHk3kYCXL/3vdpr
2rh54rz05lw9+egdpa39J7f/sw5jkwNsVJM2sR47KeKqbcdYeQ0gZjy9ioLTTz1931LHr3S1IJwpkx+dO5ZaJyXQYdPSEonT6I9R0+Nh7rJ264n5PYn8GtHxazHJEkvn5DPjMvOXc+QNo3LvmgvOdr72Ov/7udWTlPp6Yl01Zp4fiJg+PzkrHKKo4LXo6AxJ2o4hZLxCIqKQ7jPz4M3P55IGtxKTloyrKsNV7HBOGlb+zkYd/tY3e1k5EvZGQp4uAu5mJOQ/RNHkyctwkrJNEOvwSB2o9bCovZ93bu85bMEyOhM56iJ3sH/joHf3ptOuOtpGdYGV6dhwP3DmJXKeJ9YdbObBqNaXOFKaumMt7R1vZuqGU9pKttHHhTJ9xN97LTx6bzcxUO+1+iWZviKii4AnLxJl1mOUoYrgPxepENVhRjVZ6JZGwpOL1yfzH20fwuAM4MsaRWpRP6do30BktJE9ehCJFBnqh6c12ZiyZztGdFtpLhqbo2aXgyBhH0bz+AO6qA+UDwZF6sw0p6OfY+vVEfN0cWrOJPa90I4V8CKIOV9EcvnRjIRn6T5N0/TFY9hhlPRFePtzC+zu3ULNnF8FBatGcSjTgHfjMJ1e4EV830+/6GN+9dwo9oSgNPUHCwaVMnZjEt1cUcritjy8Vlwxkb1W01160MaMzmsmYfSPe1gb0Fhvjk/sfKKIAdp2CztOKIEcRVAVVEFENZlSzA19UwRdVqO8Ncvho26AxHEXL72HJdVk89+sXRiRwNjfJzi23FPL35gmoisxNN43j8wtziTGKbK3v5dvNjfS1VOHKKaJ02+EBT1E04MHiSudTn7+Pny9NRYgGQdSzbMvLoCoE47LxRBW2Vnazu66b9TvqB60xdBJVUehrraa1Jp9NKTEcP9aOFFVoLt5MbNYEUgqy6WroQBB1/PKrS7krKUSzPpHdjV6++jPfZWVyWVxpqLLMgbJOdMvy6QvLBKIKFr1IjElHrF4BSUE1mJARUVQZWVFZkufijRW3UfLeq4OeV9QbcOZNo6+lmoiv57TSMcNNTEICuNIGDBJzXDKz7rqVT12fQ1asBafFwIcHmtAZzVz3yEN8bnEev1pTxsG330COhPrDIfKms+P3n8K16qd0ftiJLT2RSZ+9nUmiiGhzIMxaiUdnp80n0eYLIwrQ0OjB33l2EHHE18Pqv73JrrxphLxdBLqaT9tmjEtN5MaF2XRMS+X2KalMTLIzzhrhkWmL+eOePP74mzcvugFzxNdDe8k2BFHEYHMgCpBg0eGXLESiKoKgEFVU7AYdVr2IKPU/H25It3DHZx7kzV8/fc45QdQbMMUmYjDb8bXXokhRBFFEb7YPVAzXm+1IwpD104ZBznVyvsuafzvvfG85n/rjHg6vepkbPvEg1+e5+EPDcebefzcNxztJnLiAr31uKTUdPiKSQmFfKYdX7UZn1GGwmVny/dsxp6SghvxE/UHUaAQMZlq7gpS299HTGyTi9wzoK9LXza/XOvG4A9y5NJ+yFi87PzhAzylFcM+8f6qi8Mz/vcyO5UtZMdPANx+fxVONFRcVmxjobORPP/8TbxbMJHtyNjfOTMcdiFIYb0EvgqSALyJjM4gY7AbsRpGV6SK//d8vU+MO0NYb5B+/Kj/vAsjkSEBnsgzeE/g8urwiw0oQhDqgD5ABSVXV2YIgxAOvADlAHfCAqqpXZOp7Go+f5doXRJGX//gqr/xJh8Fsx5k9Dm9rA/7OhsteWdiTc1n97aVkhpr4da2RqVlxZMVb+gPw+kI0eIKU7zlG2NtF2NvF1r+UXLQ34aTRtu/7S1E3PovBvhg1JpfDbRF+v6WGReMTWTkuCYMjHouqgKgfKHp4MoX5J++WcmzjNoI9baiKTNr4AiyudOIyJ/CXbyxhY1UX/1u2GynkRwr5CIYlYpMTaL/I6vxDqc/euqOocyehN+hOWxWYYxNZcNsCqso7qdj4Fr72OnRGM0Z7PJNuvJE1X16ITepjW/dEih3p/P6r76M36Gg5uvuSi7cJoojOaDkxyemYPSGJQpeVsi4/bl8Ym8PEd24spMBbSiRxwmnbq5fiIbIlZvHOfywn16Gj6PNv8MvXjvKbz86lOyhiNRiwmewIQQ+KwYJqMPf/kSLhi0JNd4jfb6mh8dDpNZd0RjOp05fzqTsmMCcjjteTsgh7u9CbbRhssaeVgBj0sw+RLg06gX+7Poc7Jz+OoqjkxZsx60RCskIgKp+o8XQD//OF+fzkXQf76o5iik0kadwsfvTZuSzJiYNDbyH39WKYthivM5/XSjs4tLOcsmYvpVsPnrNFyGmfRxQx2uNpPbSBNSX/7K0oiCI6vRFvVwApEiTs6eKN4mbiF+QwPUVHTpwFV2Y67grxRLZl5KLH65QblzExx8kbz63h+wUuvnRDHklWHXFGECIBBL8f1RKLpOvXqVEn4ItChz+CyXz21HryMxRcv5SbF2Tz+nspNO5dQ8bsm7E6TJR98Ma55Rkifea6rDz+g0/zgx/9A1mK8I2nHuXJOenESn3URk0cbO2jaudubIlZ3DQlhcouP1V7+uu8xRfMJCE7m6e/uIAMYwTdnMWYM+oQrDHok7OIpk+mKSBQ0R6k2dvFlvJOig+2YHOYqd69+5z3PdTbTkvxB2cdVxWFlpLDHExzEA1L/LbFy/Etu8ieNYtP3VJEpzeEqDee7+MOSsrUpXzzyYVcnxWLGPJiNDkISgoxBh1xYgikCKpiRtWbQY4QUHR8eXE+W9bOGdRA15ttpM9cxpcfmY4vLPGX10vorDxEfO5UHPEWqrZvRNQbyV+wiNKKd4dMl1ZnPKkL7hjYdk6ffSvpBS4OvvMOGYUuFBWajlcRmzmB+vIuPvHqKhQpgtmgQ1FUfvSVm7h7fAIWvYA3ohDRK0x+6ouEyw+iT8nCmDOBaEI+qtGKPhoioLNQ2h7k+b0NfLihgtS8BEwx8UR83dgSs/B3NrD35X8g6g082ziTsKfroozeUE8bNcXH+fWmrRitsZdUh1EK+Wkv2Ya/s5HFU++nyGXBYRQRVIU+CaIKxOkEXEYVBBXVYOZjEx00eqPsaepl/eRFA5nBp2KOSyYmLZ+siTnMnZjEqvdKT8tCFUQRg9VxTrmGwmO1VFXVU5fS3wI2qqr6U0EQvnXi9TeH4Dqncer2TJChyb5xpBdxvCuAKTmTKckBnqtrYGd5J77eEOPy48mItxI4w/V8sQ9gUW/kqU/MIPDcDzFYzehNNho9EX7x8mFqdnzAvsQsKh5azucW5pDlcGE3ihgiPgKyQJsvwh+21HDovfdPq71TtX0LRlss3uYK/rq7gLwkO46McXRXFSNHQpTvOU7Uf8lbIEOiT0WKcuDNNwd+PonebCcYkWkpOQj0b0FMvHEleoPI04/P4Vc7G6jv8nO4pJ2Gg/uvqAZJ0sSFJGSl0HS0P/tvzQdV1HcFqDzWQVtZMWFPJ7fU9PDlR2dQ1lJL6ERblUtFZ7QQb9Ghr97Nb75xI5956h984yUjt8/PYkmei1nJsaAqqHoTgiKDIhFBjzcc4bVDzexft5tAZ+
NpK1xbYhZPPTGbRdlOwrJK1pQi4lIzGT8lmcIUO2+8V36aK/8cXLEuZUXFZhCZn2oBVaFPFukOyuxq7OW5DyqJ9PUQ9nTx/16001hSBkDyhNksuT6blYXxqKpK+5S7sRj6V3ebanr4z99uwttaTdTvuejtMrMzhbx5C2k6WkJf6z8zSlVFOc0jK+oNrHvpfdb8PUL+ddcT8kdoKz00oKczC/aeC1VR8PWGmJMbz9GF83j5j6/S4b2Ln9wxkRijAUM0iBj2I1ti+++TCmFZpTck8cbBZqr3HTnrnM686UxePJ3PXJ9LSowJTyDC683jmDM/k8kZsfx4u+OsLcszYimvWJ86ZD43OZbiT96GJxDl63MTQfKh6gxEQirP7qgj2NPvPf3Jfz0H9GdjJYyfx+++cyc5cRYmWQIoRidNGddhyVlInBghKog0BQR+uaWG1e8dIeTpwt/ZcMVZWP7OBo58sI2I34MiRYgGvJS8V80PD+T317S6iPpHZ2K0Wnliciyitwk5NhVPWMZlEhDkEELYjxj0oFidhMxGZAz4Igomvci4eRPOSngQ9QbyFt7EXcsLGJdgwx2I8ti9k1i918GiqanIikLdATtGaywrr8+h9VUz7f0avGJdGvQi33l8Fl+tPooUDvLvn57Dnho33utvJC81hhu/8gLepgrSZy6h5ejugYKt215+G1tSFktznVh1KrIqYDeIoCp4xi2nL3cpnYEoBlEkTtJR2xngtUPNVLb76Gr2UndgD0F3M+0nFjjJk28gMSeV4x80A/0xxZfiIVYVBU9DKaoiX3ZbmpSJ03l8VjqxegUx4CFoigNUnGYdoiCAqEdSIaIa0KtgN4okWI3kTEqm+ZSoEkEUyZp/Gzdcn82iwgTspn4TqXdZIasajw/IZ0vMQt9rPqc8w7EVeBew5MTPzwGbGQbD6lK5kLtd1Bv4+N2TCEsKqyvd7Kp2s+Gd3QMKP6Y3ojNaTosRuBTkSIhX9zVxh0GPZenHqCaBH75fQu2u/gaSnsbjvPmsD09gJV9dVkCyzYDVYKHBHWJzjZvirccG3QqasHgebXW9VNX3UJgSw8xlM9nSUkU04KWvpXog5ucKuGx9DvbQ7GutYvuL/5xwTTHxdNR34spw8dgfdlH+4doTwcsX9wCEfu+OIOoGgp5PHrv19ulkJ9gw3lTIL3+7lub979N6yHiap6dp72qeOhHwfLkxMb31R/mPtRX8163zcZe7sSVmcvi9VYRDt7G1rJPf3D+FLFtsf3yVIOKLKjT2hNlQ3cV77xw8rXhkQtEcAu4WBFFHqt1EVFGJygpP3TuF2p4AaTFm0h0m9pR30Xpo8Jis83DJuqzt8rO2qptbC+IRBR2banv4n3dLaTxef+IhEzkRA9UwUD1eFATunNL/wCpu7eNHrxyhs7ELmzOGzqpyvM3l591SURV5wBNxcnt06s3LsVqNmCzTOba+85zboooUHTDGj615/bTrXKon293UzviEuXztzol8esNGNjz/GpMyPsNXFmYRY09ENVgGYh/7Yz/DlHf52bG1Fk9j6VnnSy3K54tL8onKCr6wxMxsJ6tsDmRFJdVhJqFozsBW/kn0lvO26rhkfUroaJWMFKXEEJYUWiN6DKKBZk+EVcdaOLh+z8B36tT4Jb3RQlhS6PCHSbY5CPokPqzt5q/rK1k2M53rsp2sPtbGa8+tPWuRe6ZXX2c0M+W2u6k/dPyCvVZPlsI4k4vd/huMlkNbeKl8Fg8VuOgI66jpDRGXakMniKhWJ4IcAVVBUlS8EZmwpOIJSxSmxnDAHn+a59xgjeWz90xiWa6L+t4gUUUh32VjzvhEun1h7GY9Or2xv7q4JCPJ54yPvWRdet29/OLlw8hSBFWR+f0bR2k6sp9bH7mdI8c7B3YKTm1SrCoKYW8X0YCH2368iclTUnDZjSwtSmRxThxBSeHpPY08/49thD1dTFlxPc3V3bQc2nJirPcnrhjtzhNbZC3YXfE0HDx0RVnqV5rh3nxkLxFlMUIkiGx1YgKs/jZUvZmo0UlQVhHpL+nRE5IJywoVbj8V+852yMyYnc4T87Mx6AQMokggKjMnN54Pk3MGFsCO9CKoP3cC2ZUaVirwgSAIKvC0qqrPAMmqqrYCqKraKghC0hVeY0goWHw7SVlxVO6rorfh+KAT89YjrRys66Fsfx3tJVtPM8IUKXrFwaiHtpdx9Af/H1/+0356OyroKN152oou0NnIhpfeJTXufmZnO/uPRRXe39+Ep+H4WecLezvxdAXIm5hEVFYIRmQmZTjYE5tINOAlf8ESFs/N4JmfNlzsFsiw6/PM2DZfex2+9jpaD13aeYx2J/F504hNjmfalBSSHCY+2FZH9bb3UaQIiePnMyc3ni3lnRw63Nq/Nakogw7gK9Wrqii8+fd1SIrK28+9R8jT2d/Qc88uGs12Hm738fRn55Ida8QgqvSGZEo7fby6qYauin2nnSfgbsFgcxDqaafBEyI1xoRJL5JqN7GtuovXt9dRmBVHbIzpQo2bh0SXwZ5uvvbD1/jfKRMRBYHGkrJBt+5OvlakKC1Hd/PLD5xIUZnORi+1O9694HfvZNzRuHmT8LgDOJPtKJKCu60PvyfM5Bwnq97cQ19L9UXHml1pwL+oN+K0GHh2TwMRXzdWVzryiYm5OyjjslixIxGWVfxRhZ5glC0VnXibKwa9tt8bJiorWA06rAYdSTYjtvgkdmw4gstuJH9GFm1H/mmEiHoDZtvAVteQ6LOq3cftP9lMw4GdqIrM67Oux2o30d3uo7v2yDk9Bm1HNvPEF4vRGS3kL1iEKzWG0h3H6CrbQ/GbIiZH4kAD6lPRm23k33AzXfX9jeFVRSF95gq+cc9kvtsVvKQm9kOFHAlysLGXiUl2vvrSXmx2E396cBrJugjtsg6LJZkYPfiCCoGoQjCqUNrho7K176xzCaKOeIuBoCRj0ovkxFmIyioL81z8em05UlQm4vcgSxG2H2kjFJFhiHSpytGBJANg4Od3nnnhtEXmYPObIkWp2vQ2VZv6X78Ql8ykFcsx24zsf+u9gTG28+//NGBdRbORgv1OAFNMPDpjv2FVt2vdiJf+kYI+4kw6xKCHrZ399e2mGgFFotkX5Ui7j0mJdopbvRxq6mVBrouXN1XTVbb7tPOoikJjixezXqS2N0iMUUeSzUShy8qUBePwtlajN1qYPj+H43vFwYXhyg2rhaqqtpz4EqwXBKHsYv9QEIQngScBMNiuUIzzkzX/dm5fms9tE5LYPS2VHRUTqK/rQYrKNB7aR9jTiRwJsf/VF9AZL9kLcNG0l2xj5ePHT9vOO5Ogu4V/PP0m76YX4Uxx4ky2U7W/dNDVtqooNB0+QCgwlUggRE6ijevyXRhs/dsT4WCUmo7+jK2Y1HzaS7Ze6LONCX0CTFh+M//z6Exizf2eyKisMD8nnt8k2wn2RTDbDDyzppzKnTsGDzwcYrzNFbz86+bTJrSTD6mS1ioeD0v8+sl55MSZUIDG3iBt1c1n6cPbXD6wZfX3D6uY9vAMxrnMdAYk7pycyr6KL
o5Xu7HaTRcSach02VtfMmhLlnMR6m1n1wsvnJUBdD7icqbwX1+/nRmpDirdfhQVTHqRnmCUkhYPm/Y2nbUNM9y0l2xl5bdkgj3tKFIUf2cDr7x9jOWFCcRbDPQEJRJtBnRiv1cjJCscLes85/ZUR/lBdtTk8ejszH7jKqTj7lsKefFlN3azHoNOHOgbeZJTpu4h02flplX/DCfY9PZF34+Tdf5K176BIOpOC0g+X/xjclYcj9w6jn+szqRy0ypScuKQZJW0gng87bMHDK6rharIfLCllgMVXRxesxZHRhE/T7Hzy+VpyKrKxhoP8zMdSLKKLyLT4Yvw+r5GSj7cc9aORTTg4dntdfzrsgKcFgOxZgMhSSaq6HG39uHv8RDxdSMEdLTWdCFFZRjmefZytl9Dve0cfPu1847XcfMmUXO4Hk/j8dPioEbaqAKQQj4+9sc9fGZFIe8cqmFqVhypczNIjHSwp9nD957ey7wFWTR1+PD1hthT3kV77eChHw2ldbxXlonVqCMz1oLd2G8mTcpwcNCVhsFsZ3yag7NdHf/kigwrVVVbTvzfIQjCW8BcoF0QhNQTVncq0HGOv30GeAZAtCZcuH7AZaI32/jMA1NYOS6JX22p5sjxTv705DwUVaW2N0jVreNYc6CZ1toeOisPXfYe78VyMavtk7WZ2k4EX8sn0lkHw9deR8DdTM51K7llYjKVXX5M9n5vV+OBrbSUWJh683IWTU7hNzXnbzY8ZPq0JZ6lz6EuGeD3hqjuDnB9VhxGnUi9J4ooCvz3/dPwhKP8YWst6//28lVtaHwuz5ciRWk4uJdvvWQkHJRYeX0O+2u68TSd7YZWFWXgPOXbdlKyooDpyVYsepGcOBOuOAvlzV58vaHzfi9Gemz2P3Qv3gjytdXy4p4GKnLjmZkRS6HLiigIBKIyGfFWuhrarnqbH0WKDtTFgn7dtB7awP9uTOfR67IJSTKvHvZwqL6XiekO7GYDXQ0d55Qz7OkkIikYdP11kuwmHRNS+gNg67sCuNt9Z3nJI/0P4iHV55WOw5M1xi4GKeTn6IcHmJ17I+MmJCGId1OUFUdIkpmb78Jhmckmd8tFl00YClRFoWbbu9SKOvRmOytWzmD7/mZal+Rh0glsqezkt+vKEXUiRrOeumMdhH2esxKooN+r2VLVje96CYMoYNaJyGp/WaCAx0ewp/3ENWUC7hYUWR7xsXkuLjS+Kg9UDmwvjrZCxqqicOD1lzj8np3YrAl88Qf38G5FF4/n9m/ltR7ayLulFnQmC7HpRZRt2H3OXRyd3ojLZiQlxkRPMEpYUpBVaOoOYnYkotPraO0Nop7n7l+2YSUIgg0QVVXtO/HzTcAPgXeATwE/PfH/RVfjOhllP5QPQzkS5I0ttWTGWZicEcvad/ZxuG0C12XGEYzK5MVb+fSSPI6N8/I2UD/MhtWlcPIh298g+NyGiSJFCXg87Khxs+Ng60DfuoivB70UYXKOk2NNnvM+iAFREISYodCnwerAYHcOeNkMVgeJ4+fjrioespIBVZve5usVpfzHUw9wW2ECle4A1V1+siaZqXQHOLS7biDN+XxYEzMBht2gDnQ2cvjdt1CkCMc/0GGKTSR8nmB5QRSxJ+fw9801TEt2UN0T4C/bajm2uZiAu/lETNngRrKqqgyVLq8WEV8Pu156ndCdd5KfaKMvIqMoKoeaetl+pI1w3+ioIaRIUXav2sCxvfkEe9oJeToJ9bazU28gaeJCegfZsj+JLTGL5eOT8ISimPVG9jZ5+OEfd+JpOM6GV1oG2uCcStDjGZa59mrSU3uYdzbm8o2Hp5N+Qy6lnT5+9o+DNB7YCvTfl1BP21X2WvUbh9GAh507Gwj0uvnMS4f5xPwsNu+op3bHmosy5BOK5rByeR52ox6npd97vrOhh2dXldJRuuO0h7ensZRowDfmxuZJOkt3DnrcmTuNaMAzIs2OT6Vfn15660qo6bmZbRVdzEjNo7arPxlDjoTA13NBIz6tMJWFWf3OidebWukLS4xPtLN/bxNtR/r3TvdZbjlftYUr8lglA28J/WfXAy+qqrpWEIR9wKuCIDwBNAD3X8zJrImZzLvrJox6kR1vrR8SJZ3Msjq6+g3+v0Ml2BIz6a0r4QdPx/OxOyfgspuwm/T4whI1HX5aj2wf9ByO9HHkzJxGW3XziNSEMjkSiUnLx12x75yDvatiH//4UyMhT+dphqkiRXh/bRk99SWnHTdYHaiKfKqXRQ9sHwp9xsdZCJgscMKw0hkt3L5yAh/usF7S1sPF8Ke3jvEXnUg0LHHr0jxquoOsO9pG+CKSDARRpOj6heRnO3n/hfeG3bg6aQipinLBwX0y3qpkYzXfthjo7fRTsXn9xQVgqwoMkS6vJoKoIyfNQYLVSHGLh4ikUNPhx93mw9tcftHnic2cQM7MaTQe6+/ZN9R6PRkXeCqKFKW9ZOt5jYOI38Or+5uYkxdPgcvGi3saBsoMnEuvJyrXD+lcOxLEJ9uZluKgrjfIs++VUbPtn3F3l5JUoDfbTttuv1JURRkoV9BbV8LRD3MH6lBdLFPTY2ntC5MTZ+Fwm5e/vnVsUMNMVRRQJBiDY/NcWFxp3P/IEtZ9WDPihhX0J0ZYnClsLuuk5Fg7Txxto8/tu+jviyCKFGQ7SbDq6Q3JzMiI5dfrKtDpRTrKDwzM4S0lB1A95979uWzDSlXVGmDaIMfdwPLLOeejC7KxGnTseOvC770YdEYLMWkFeBpKT4sTiU2wsaOknYyUGGbmOLEYddTUdA/0FTsVkyOR737zPu6dkMQLh1v58Q8rLrme0pUiGozkzygg7Ok6Z8aNFPIPmimjSNGzusALokhMWgGqLJ8aiBxRVXX2mX9/OfoUhP4sopPoTRaMehFP+6De7cvGlphJQpoDRVHpbuvD7YtQ1xMg1trfn/F8g8lodzL55lv4wf1TcVoMtHtWsOeVN67q1uGF6GutQlUUdr/0KqoiX3QsgyDqUFR1SMfm1SBz1iIWFCTwkzeP0l7bSeGsbNKdFqTI2S10zoUgiuTMnMZfnpxHbc8U/uP5Yso+uDpe6AtN3qHedtb89RW2ZxRhi0+ivXTved8/cN5hmGuvNmXb9vI1iwFFUmg+duyyDCNBFMlbeBMpOU5KdxzDXbFvSL1c0YD3koPpoyEfvojM8xsqiS4v4IWNVdTtWnfO7+tYHZvnIur3sOrt4nP2brzaiHojRQvnUVXbQ3dzG11l566fNhh6s50JaQ7sRpGIrNITjOLzhGg+Vn7aYjjQ2YjqO/cO0KipvB7obOS3a8owmPRDZvlKIf9ZHbN1RjP5BS4amz1seGUtqf9yN5VtPqp3Dr7aDHs7Odbk4dFpKdw+PonfZ02g7SobVmFPJ6FAhLjMotMMK3NcMskT59K0f/35Y6fO+FwmRyKJuTnIkkLA3TzkFb37AlHueuhm3n9pHVIkyM0P3UpxdTfuUzLgrhRBFAl5umiqtBANBeltPE5LiZHNqfkYzJYLtiqIBjy01brp8EeYmWrnqysK+fSHWZcUpD2cnNqf70JZ
iyZHAsaTCQt93UjDLt3w0NvazMs7YinfupWgu4XumgTsKTn01l28TlRFoel4Ff9vtYOvLS9g6fxManY6R0U7EkEUifh66Crbw9WvoT+yeJsr2PFcFQZr7EVt0Q+Gqii0V5SRnruA8ddN5KjfO2jc09Xm1Z311O4/zB+7+1s8jYZg7qvFyQKdo4VowEtrTRcBd8tpzbgvFltiFnMz45CU/pp0xfU9SBF58DCaE31QB2PUGFYAxzZsvPCbLpEzVw5yJMTuD48OFJdbs6Ga7trSc3qhVEXhjefW0BuIkp1wdoHQq4EcCVG2adPJbYEBLM4U5s3PpLcx96InGL3ZRtL4Oeh0IjqdSOL4ebQe2ngJYcYXJuTzoxMFDLZYBFGHxaijbNfRIQ0+VhWFntrDZ60wL9YoPxmI/PX/bKfsszdR2dZH8AI9Hq8m+TfcTOWmdy86ziM2wYYgCribOgkcuAoCDgPuiv2ntb842eHgcs7zflUxFcfvRKcXB20VdLUx2p1YnCnnreF1raMqyhUbuJ7G42z5W9VAFf2Rxl2xbyBbdTQYeRrQemgDcHkB9qoi0xOMUt0d5u1jbax+7wjdNYcHXQzI0fA5zzOqDKurtcXWVbZ7IK35YrrQ97VW894zbYh644itfM91b3SicGZ15vPSb+wI1O7biSDqSCycju6UbbuhQI6G2bDmIP6O/vpZ69ccxtNwdsHEkUZVFHzttfzxN28ONCsdDejNNu5cms96+30D3pvzkZAeiywr6A0iscnxNF0lOYeDoTK+VUUZ8ni+K8HqSid90gTKOxtGzfdsrKJI0aueIXouLiVDUuPqcCX66Gut4herjpGZHktFeef54ybPkxY4qgyrq8XlDIb+yXB0TYj+zga2fFiJzmTGYD27HcZghD2dtB7ZPhBP1hr0DRpbdkWo6mkZJKPJVXwm8XnT6GuvG1UPO1WR+bC4mR99fBr/aTGw9+UXzvt9ba9tIxrynYgrO7d7WmPkCPu6aTpaQkLRHII97Zp34xpC1BuwutIHbTKtMbZQpCiH3nqZw2J/BbnL1edH0rAaawjnUHLE10ProQ2MW3EPJnv8oI1Mz0SRoqd53UZD7MlI0lG646pPhvbkHKRw8JxeSDkS4uja91k3ORmTxXBW0cgz6SjdMVyiagwRwRO1mgqvm0lTGZphdQ1hsMYSlz2JYE/bRyq+6lrmSp8JmmE1BkiauBC7Kx53XfVZAXmqotBZV09scvoISjh2GYkVZsrE6SiSSs22c5eqkUI+1u+ox+8JX3DbY7Rsi2hcGLPNOCrS0jWGDinko7f+2KiI+dIYHWiG1RjAYHXwxQem0tRTyO9/0XmWp6O3rgR/x+gpbKpxftpKD2FLzMQcl3xOr5UiRanZseFCRV01xhA6o5m+7uA5W96cWQRYEEXOW4VQY1QgR0KasaxxGufuIqgxauhrrUEUBMYn2zEM0u1ejoSuem0tjcvH115Hcm4ayx6+a2CbdzCiAa/mjbqGUKQI1bsHr6sjiCLOvOmY45IHjllc6ehMQ5tYoqGhMfxoHqsxQF9rFc+uq8BiNxJwN4+0OBpDQEd9OzHxFvRm+6gqSqoxfPRnodYN+jtB1JGUl0vDKW1tRFGHwXTBZtsaGhqjDM2wGgMoUpSS998+0ZD59ODIoW5urHF1aDuyma4Km7bVdw2hM5pJHD//srL+VEWmp6WD0Cm11ALuZtSoluU5kgiiSHzBTMyORFoPbdA8yNcI/f13dcOmT20rcIwgR0JnZfAZrA6SJi5E1BtGSCqNK0EK+VEV5bzbgRpjB2NMPI89PJel9yy+5DGpKgrtJdtOm+gVKap5M0cYUW9kyco5fOWxWdgSs0ZaHI0hIia1gMTx84dt7tVm9DGI0e4cqOQ8bl4hevPZcVcaYwO92Ubm3JVYXGkjLYrGFSIFfZS1eImzagudawVB1JGXZCfBajytkLK2mB27GO1OMqZMxe6KJzZz4rAYV5phNYYYf9N9ZMy9DVfBTFKnXo8g6oi1GC4Y4Ko329AZzVdJSo1LwepK5/GPTSZ96tyBY4IoYrA6RlAqjctBCvmore+lvivA9Ls+NjBhJ05cgKtoNnqzbYQl1LhUjDHx5LisvHekFXNsAua4ZGIzJ3DdI49gT84ZafE0LgNnzmQcLivBviAPfWoF9uTcIV/YaobVGCI7L55P3DsJR2Isn7h7ItGQj10bDhHp6z7n3+iMZnKuuxF7cu5VlFTjYomGfBQm2LjxhpyBVbDZmUL2vGUjLJnG5RAO9vcU/e+HZ6AzWjDanXztc0uZt2ImMakFIy2exiXiSM0nyWaktrGXhx+YTcaM65iwaA7/siSfnDlztW38Mcjim6YgigK5U9KwGPU40ouYtHwp1sTMIbuG9q0YQ9TXdLOsIAEpIjM1td+j0VW257zVfi3OFGJd1nPWztEYWYzWWOakO7CbDQPtaAxmO4p07j5UGqMTVVFor6wiPd5K3Al92lNyCUZkjKOkGbTGpREN+Xh5fxPt9b2MT7bj7+ljdqELWVXp6w5qiUNjDL3Zxv0z+4tpF6bGsGZrLZYYE7fNzUS8hJ67F7zOkJ1JY9jpqKllvGsJnvY2vv3nvQOeKnNcMqLBSKin7awsB4srDXebb1T1wtP4JzGpeSgqvLmuYmCS7mutIuBuxuJKQw4HP/Jth8YSPbWH2V8zl42HW5EjIQRR5H9++Rrhvu6h78mpMex0VxXz9u9LiM2aSGWnH09zBVsPJPDh7kaaiz8cafE0LhGd0cLa0nbcbX3oMmOp3b2FmLR8Xl4nDmkpI82wGkOIegNP72uit66ErrI9A8en33Yz4zNj2XekjfIP15xmRIU9XfS1VI+EuBoXga+9nk/+eS8tJfsGjqmKgjk2ka/9+4PUd/lZ/c6B05paa4xujh9qpaOsX5/uiv0Dx62JmVicyacd0xjdqIqCHAnR11LFX/8mEnQ3c3T1a5qnaowS9nbxlx//Gr3ZRkdVARFfN91VHnpqDg9p6QXNsBpDdJXt4f9+2X5a7SNrYiYrZ6UzI83B/DwXP++L4O1sGzC8vM0VIyWuxkXQW19CcXP5wDbgSQoWzOeOCck09AbZeyiNztIRElDjkmku3jDgrTI7U4j6+z1VE5cuIhqWNcNqDBINeHFX7EcQReKyp9BTe3ikRRrTiHoDmXNvITHDQcWOfQiiDp3JQndV8VW5vhTyD+iw30ge2npWmmE1xjizcrM5NhGXzYhJryMz1sy/PTSN9w4lsqnmsNZpfYxw6krJ5EjAnpLDF1aORxSgKxCh+dixEZRO41I5Oe7MzhQ++fn72X+8g8bj9SyamspLrx8cYenGLnqzDWNMPCZ7PM6MHBr2bbjqIQ6O9HH82xdv5c8vp2kFQ68AU2wi2RMS+d39Uzm8cjwGXX+49+d/wFUzroYTzbAawwiiSNaUIqYkxRBr1uMJSSRYDViMurM8IBqjH73Zxqy7buNzywqYmmJHUuBIs0dLPLgCHOlFREM+gu6Wq37t2PQibpmQzLTMOFYnxxBj1uPv1JqlXy4xqQU8+vhypmfEEYjKPFV
z/Kp65HVGMxMWzWFySgwfv3sSLysyrYc2aNuCl4EjNZ+ORg+H2/oYn2inoTeI02Jg2pIZbK45dNXuqd5sQ1XkIXdCaIbVGMaenMsXVo4nP96ESSdi1gtUdvs5UtyiraQuEVFvGPF7pjfbeeT6HO4oiicgKbT5JJIcZhwZ466JVdxIYHEmow/ZR8SwsrviKEqwMiHRRlasmSNtfRhjnFddjmsFf2cD2w63sr+6m2hYwhSbgOUqGs0xqQX8281FTEi0YTXo2F6UREepRUsMugw6Snfg78ziXyurmHvzXB6cm0lUUfnYnAyK14275JZQl4OoNzDv4/fT1x3k2LpVQ2pcaYbVRaI32xBEHdGAF0d6EVNWLKDkwz1X5QtwLiYsvo7rs+KI0SnoehtxxWWTZDPS19Gs9RC8COzJOYh6I8YYJyZ7PK2HNgD9fdtG4t5lzVrAbYUujH2t6GwuBLueB6emEH7yRl54J4Om/Ru1SfwSMNqd9DYcJ+I7d5234cJgdbD4uixSbAZUVYV4K1FZpWjOeGzxSXSUHxgRY28sE/H1cGjV6wOvzc4U9MbzF0ceSpIKCpmYaCMzxoDNIPLJJXnUHpmmJZZcBicbkguiyIHNZv7lhlysBh3jEqxkTJl8VZ6rcTlT+J+PTaXZG+LHFj3V+0qGLHZOq2N1EejNNsavuO203kJTs+Nw5RSNqFwNpU18UN1Nb7RfJllVafaGcaZnEps5cURlGwsYrLGkTpyMr62OsK8bU2wihUvvIO+GO0akZUWf20tFdwjZkQpAjBDBadIxNdVBUoZD2969BARRZMottzJu8XLsybmkTl+OyZFw1a6vM1rITrARlBQUQCeC1aDjsUW5PPnAFJzZk6+aLNcKRrsTndGC2ZmCq2gOhQsW8rl/vfeq6NVgdTBxUjI6QSAiqwjAxEQ7hbMKtSKhl4moN5A0cSFRv5fVx9ro8Ed47Jk9NB6+OokBcrg/CSwv3sK375rEhOunD5kutW/ERaBIEbpbeolx2dGb7fS1VvH2O0exx5mHtFrrpdJespWf/H4TG2t7CDuz6fBLVHf5iUu0DWlNjmuVntrDlK9/k4ivG6M1FoszhWBfhMSMWFKnr7jq8rQd2cQj/+9tXj7WSa+kJyyaaQ9IbKvu4viWXVoywiWgKgpVew7i94YoWjiPb39uIalTF1y164d62/m/P67nF1trafNJBKIKoigQazaw+kAznWW7r5os1wrm2EQMtlgyp8/hK08uISU1hqbuAKLBOOzXjga8bF61nV9urqbBG6U7JFPa6aO73XdaD0GNi0dVZII97Qiijs076nl+Vz3HN264qnFzm2rdHGjxsvZ4O8c27x2ynQrNsLoIFClK25FNdNbWobfYURUFd1UxgihgGMEGyKqi4O9s5FBTL3WeCL6ITJzVQMWOPVpRyUtAkaJ0lu0mY0Ihol4kPdFG0fS0q95fUVUUPA3HKWv34QnLdPijVHQFeOPtI/S1arXILpWAu5nYBCtBX4Sf/fUA7cevbpkDd8V+3l5bSYXbTzCqEJUVmr0hKvaVEQ14r6os1wK+9lp0eiO9rZ28tqWGvev28sYfXyTU03ZVrt9bX8Lrz77Lm8faaPCEqO0KcMeSPLLnXf1F2LVA/3ZgLSnjxtFVU8HWF9+4qtv2fa1VvLmznlf3NfHm67uHdPtRi7G6BES9od+r4W4h4uvh8KqXR1okZClCZZuPBk+IZJuRd/c1YXYmaw/iSyQa8HLk3VcAaNzbb1CNhIcoaeICbipKRFH7t3ZDsoLBrK2IL4dowEvxGy+N2PVFvQFncn+gsygItPnCvHuwBXfFvgv/scZZqIqMLEXobThOe8m2EZHB117HkYZebh2XyNICFzvqemgv02paXS6KFKXsgzdG7Pq+3hCRsIQUHNrYVc2wukgUKXpatfPRQtjTSU2VG+uyAmp7g6S4rHiztIKSV8JIbrmJokBIVpBVlZ5gFJ0goCha38CxiKrIKJJKrFlPhz+MJyRRU9qhJZVcJqqiEBgF5SpaO3wAiILAi2+XnlVbUGNsoEhRqrZvxJ6Si6+9dkjPrW0FjnFURSY+2Y7VIBJj1KETBap3bh5psTQuk/bS3bx9uIWwpNAXkfnTtlrajmweabE0LgO92c7UCYlEZZWQpPDytlqtv9wYx2B1kJ3mAOCP22ppObRphCXSuBIivh6ifg+ugplDmoRwwTMJgvBXQRA6BEEoOeVYvCAI6wVBqDzxv/OU3z0lCEKVIAjlgiDcPGSSagxK0sSFPHJ9DlFZ5e0jrWx4YzNhb9c53y81bCda8hLRsrcGjqlSGKBQ0+fIo0gRJEXFatCxr7GXvau3nrPEwrl0qYa9aLocedKmL+aWicn4IhIv7G3kyNr15y2XMag+VQVtrh0d6IxmrnvoY3znpiK21/ewdZU2Nsc6WfNvZ94dSwn39QypJ/liTLS/AbeccexbwEZVVQuBjSdeIwjCROBBYNKJv/m9IAi6IZNW4zTMccncdfdMsmLN/GV3Pa/85Z0LBuCJ8QXo82487ZjScQSgT9PnyGOKiWdCmoNdjb385R97zpshc05digY0XY4sOqMZV6odURR4cX8TH7y0mlBv+3n/ZjB9IgXhGp5rBVEkvmAmjvSiUV+2IHX6cr6yrIAGT4hf/23feefaj+rY1JttZMy9jaLl95AydQl6s22kRTonKVOX8PDdEynd30BvfcmF/+ASuOA3WVXVrcCZofp3Ac+d+Pk54O5Tjr+sqmpYVdVaoAqYOzSiDh86oxlzXPKoH9hnYkvKJDvBxm831/Dms6suaq9ftKeAznTaMcXTAOA+8XLM6/MkRrsTo905IjWpLhdB1FHS5OF7v95Ie8nW8773nLrUDxy7ZnQJEJOaT3zBTAxWx0iLckEEsf+5+PtN1ax67p2Lig0aTJ/IEbiG5tozsSVm8fGHF/GFf72bCTfdO2ofxKLeQGyCleqeAP/2fzsGigmf8/0fsbEJ/c9Rqyudzz88jd9+/jp+8dXl3P7kJ8maf/tpeh0Nz1mTI4G8adk8/+qhC+rycrjc4PVkVVVbAVRVbRUEIenE8XTg1AItTSeOjWricibzqUeX0OENsX7NYTrLdqNIUUS9YcSqcF8McjjEr/+8jY7SHVcmYzQEJ9p7Xwv6PMn022/jrvlZtPYGWb+jnsZD+4j6PQDIkeCIt7AZjJjUfLZ8WHn5Oo2GEEwxwLWlS0EUSZ8ynX+9bzJhSWHN4VaO7ixHb7aTPSGJ7nYftbu3nHcb/GqiKjJ1h8vxNpVfWbV8VeVammtPRW+2YXYmkxpnYX6Wkxty4zlwUyEvf1BJ3YE9BN3No2buVaQo9Qf28aPDhy+/ztI1OjYBnLnTuP+RJcRajWTGWkiwGsiLM7M4J472m8dx338JuKuPYnGl4e9sHPEkBEWKUPzu2gt6kS+Xoc4KFAY5NmhKkyAITwJPAmAY2VVKsKed1t4g/76sgLumpvKTtzKoP1pJcl4OkbBE44Hto2bCPhVPY+lwTzxjUp8nqdpfSk1ePP96Qy4Pzkhnf/NkNpd1EJEU2jt8NF
e00Fm2e1QV3uyuOYwcCQ6HXse0LgEUWUFRVRbnxLM8z0Xt0nzqeoKY9SIdvjC/qqkdNeNUjoSGu7/jmNanIIrk33Az//mpWcSaDSRYDRh1AuMTrHxsUjIfVE/kez95bURbhp3JMGb/jWldQn9PztnZTmanx5JmN6AXBQJRBVlR0YsgSwpF11+HIAjUHgqNuGEVDXiHtZbc5RpW7YIgpJ6wulOBjhPHm4BTS5FnAIM2xFJV9RngGQDRmjBi+eQWVxrTb1lGjFlPXU+QeekOnv3MPA639beEiSoKLxUmsP3Vd4fNur1chuzhazBDOGIAGOv6tCfnkDJxOjq9yOJ5mUxMi6XDH2VqkpWJCRZW5MdT39tvSG2sTOOP/1c94oP8VK74O2Ywo6r934uxrstTyZy7kv99ch4xRj3uYJS0GCPTk23kxlnYWOOmwxsa8EZeUwgC18pceyo6o4WZ01K5OdeBGOojYrbR4ouiEyDTIvPApER+EpuAZ/QMzSvnGh2bAJ1lu/ntO07+/oXrcHRXIigSauIENtd7afaG6WutYcbcRXxiXhYHZ2fwq582j7rn6VByuZud7wCfOvHzp4BVpxx/UBAEkyAIuUAhsPfKRBw+BFFk/j238vQjM/i3G3JxWgyEZIVEq46bC5w4LXrsRj0LixIwxcSPir3h4UB0ZAG4Trwcs/qE/hT3O5cX8J+fmsVPby3i0WnJpMUYicgqelXCYeyPfYkx6vGFogP9oq4VREcW9Gd5whjX5alMnpnGDZkxzHREmJ1qw6gTMOtFcmMN1Lv9PPf0O0Nei2ZUoDPCNTDXDkYwItMbATHQg6W9lKwYA76ogh8Tjd4oshQZaRGHlGt1bEK/B6hs0wcc6/Sh2BNRfb3Yu8rJdVp5fmMlAXczrR0+DKJATryVrFlXr73USHBBj5UgCC8BS4AEQRCagO8BPwVeFQThCaABuB9AVdVjgiC8CpQCEvBFVVVHbedYndHCovGJpNoNmKI+so3g1+lQVBVbuJdxCTGUdgYoTLBxx0PLWfOmZbjd+8OOVLcZ1dcGUojosVfQpcxATJ6C0lniEAShkjGsT+hvU7B6Sw3zsmehVyVEv5scUY+qs0FUIk5vJNluot0Xpi8kjenGxufUpbuMa0GXJzHanUzNisMTUXACpp56Um0uAoINSYUJKQ5iM8fj72wYNTE5l8Ng+kRvASl041ifa88kcfx8Prswp78Yrj0BQ1c1hrZScmKSaY/G8f9WH8fXVjfSYl42H5WxeSapdhOoQYhNAn8PJotAd7sfORKkq6WP450+nl1VSt2u9SMt6rByQcNKVdWHzvGr5ed4/38B/3UlQl0t5EgQSVEJRBV0RjsGfycxfjeqwYwY6iNd56XPkcqxDh9LxyVyZEIhvXVH+4tyFszEEpeMu/ogQfegXttRiT5nybl+VaGq6uwzD44lfcKJ2JbmNkx6kaCqwyaI6LytEA6CJQbZnohZ7yAsKXT7IihjeFV8Ll0KpliUQFfhmcfHmi5PYkvMYllBAkFJwWxyYJEj6Dwt2M0xhGNSuXOci7p7J/HLkm2nJSQkjJ9HUk4msqwgRWSaDu4YNTFYgzGYPpWuMhRVHfNz7ZnEJsWRYjeRbNVDSEExxyKGPAj1h0kqmI9RLxINnHtrV2c0n0gs6rc/RptB/VEZm6didaXjMOlBkUDUIyfkkG4zcNOSXJ4tttC0fyNPSwrRUPC0hA6d0YyoN2KKiSfi91wTfW4/0i1t9GY7KQ4TiqoSlBT0RhtiWyWE/AiJmSCIZMcbichWuoNRJo1LoGp3CmFPJ96mCoI97UT6rl7TSI2LIz49hXEuKzpBQDVYUPVmpKrDoCgYxs8hJdFFb8hIc5NnTBtWHwV0RjOZk8eTYDVgM4joRAHF5kIM+xF7WzHJEkpsJgfrek7TpSCKRPp6kKJpWB0mgr4IBot9VBtWHyV6Wjpo6QuR4TBgk6MgR1BFHWo0gr6ngZk5Tt4XBy/lJOoNzH/oQWbnxxORFDbtbhzRfnMa/ZhinKiAYnUiqArojNR5Imzf34wU8qEqCo171wy8Py57MpOWzCbWZiQiKfj8EaoOlF926zhBFEkYPx8p6KendmT7N36kDSuDLZaeQBSdKGAziCCJCEYz4bL96Dxu9OPn0xeRWV/VRZLdxMwcJ7U3LSUUiFC9e5/W6HgUIuoNxDgtRGQVUQBBkREUCYBoezOiIx5TbBpmvRlvV2BUllzQ+Cei3sj4QheBqEwwqiNGJyEG+le0SsCLCIhxmbS39w1kd4p6A2kzb8Jk0VO7a+PApH4lmBwJqIp8TaymRwO9Dcdp8CxjeoodiyUWIRJAUBV0yVmoejPFdT3IkcHjHxUpyv633qMmbxo6o5ne+mNXWXqNwWgv2cajv0nm7a/fQEpMMn0S/M+6cpqPHRsYf6eOQ197LSUfykRDPiJ93VecDW1xpfOFTy/iL68e1QyrkSTQ2chzq0q5uTARY5wRW9gPBiO6GCeyuw29HKE7KLOptIPGajcBTwBFiqA324gGfVd8faPdScrkhbSV7NAm7CFCVWSaypopbh1HgtWJYorFYvahcyah+PtAURBUFZdFh9k29IVDRb2B1OkrcFcVX9NZL1cLORKkpLSDrjmZxJn1BBQ9Nr0JwdeJKkVRAl70qkQ0fHpIiigKpBe4SMm5l4p9FXSW7rxsGWJS85m0bCHHPtTG6VChKjJ17gB9YRmDqMdmdaIarQiRAIrVSWtHy3kfsmFvF62HNl5FiTUuhoaD+wlGrwerSESWWfvOuSvUSyH/kJbTMMcmsnpvIx1XMNaHimszze0SUBXwRSQisopqsoEggt6AoDegmmz0hqLcMSMNvUFH66ENtB3ZTNPeC7enuBhm3nUHixbljOkA6tGGqij4OxvpCUYJRlXCsoJqsCIYzYgxcehcKbSoNko6/IT8Q++tyl24ku/9yzzMsYlDfu6PIooUpXbfTg42e5AUFX9UQTE7EOQosseN3N6I6HejN/xz28iZN40J01M5vquMw2s3X3HCiSk2geqDVddm1uEIEfH18OKbJRzt8OONnIiTMlhoMKTyUlWQtrrekRVQ47LQm+20+yO4QzIRWcVgtl+1a/fUHKL4zVdHxeLnI+2xAjBa9PRFZDxhmdgYc3/gnSIjxrpQDVasBh3pDjP3rSjg2a4VtB7aMCSBkoIo4nUHWL3z8LAWKvso4u9s4B8fVpHuMDMrNQaHqAO9AdFsA7MNp0lHIKoQ9A5tfJwgirhSY+gORgn7tNi7oSLs6WRrWSe3FPUbq3ajiMEUgz45CzUSwmdNwu89MvD+nprDbKgrGbLCrwF3Cwn509AZLVdWRV3jNCIBD+WdPsYn2FCsdkKySlV3Hz/7x0Hc1QdHWjyNy8Df2cA3Xihm1oQkylv78FxulfrL4ORzWWc0j3jR54+8YSVFZco6fSTbjPjMOpxi/y0RrTGooo6j7X388tUjJGbEYomxIOqNQ6I0VVEo27BKi/EZBhQp2p/iq6pEZAV0IjiS+t2zkRA6UcAgCsMSuN5S08OvKzquufpYI4mqyFQea
eP5tBhun5RCqv3EFq4zDfQGSjoC9DQ3Dby/f0wNzbgS9Qby5l1Hb4dfS3QYYrqrivm/P0b4YOEUfnD7BFYf72DDnkYaD2w9b0agxuglGvBy6K2XOfKu4ao/20S9gbkff4jutj4qN60a0UzRj7xhVbdrPT+uKOXNG+by0mfnEqc3o0/NRXa3IshRjjR7qNmxgcoTgZRD+WXRjKrho7l4M9/6vcLMuRk8ff9k7KIOQYqiREL0hGT+/Zk99NYNbUdzVVFwVx/kiX/9OGUtWWx45s9Dev6PKqqi0Lz/ff5atofi22/lhU/PwWaJBUUiYk9m/eF6/B3DU6JbkaKUf7h2SALgNU5HjoRwV+xnZ81h7ngvhajfo93na4SReLZNvvVePvjCHJb8z84R/w595A0rKeTH21zBgTcb2H/bBG5LskFyPrq4FMJWF5VtjShSRDOCxhjRgJeG3e/RU59P+dIC5oTdqM5UMFroCkg0Htg6LO7i5AmzkRWVnW+tG/Jzf5RRFYWwt4t9r77MspoeHAlWHE4L2clujpZ1DquHQ9uqH14UKXpRbaUMVgeg6eNaQWc0ozfbifi6r9gQEkSRFfMyqfFK9HYEhkjCy+cjb1idRI6E+N+15dz6xXn9cVaCiArkJdmGbPtP4+oT7GnjmV11TL97JmFZRRAEou4gjoyiy66Xcj4a937AM3vRvi/DhCJFadj93sDrnfRPqiO9QtUYXqyJmcy+80Yayruo3/mepu8xjsmRwJz77qAg2c47r++44gQTVVHYXtLGsSYPLUd3D5GUl49mWJ3CodXr+NGkZB6dmY5FH48+rGXrjXWkkJ9Xf/8CsvIw31xeQDCq8OstNXibhieoUjOorj7aQ/baJ9LXzZENewm4mzV9XwMoUoSG451U7qvC21Q+JOc8+M47qIo8KhJMNMPqFEK97Tz7/FaSHDdiEEWS7EYOVncPW9Cq3mwjefIi/J2N9NYPbbyPxj+J+Hp485lXOHhkMXqjDm9XACl05XXINDQ0rg5SyK/NkdcQJ0M1hvqcowXNsDoDd8U+XtiYy/cfmMbP15Rx5INNw+KF0BnNzH/oAR5dkM1//kWHp7FUW4kNI2Fvl9b2QkNDQ0Nj2NEMqzNQFYWqnbt5Jd+Frzc0bL0ARb2RhuOdfHNzsWZUaWhoaGhoXCN85CuvD4aoN2LSD/+tMZh0mGKcCOdoNqqhoaGhoaExttAMqzMQRJHC62Zz/4x0HAnWYWtNIkeCuFJj+OG/LiFz7k0IoqYKDQ0NDQ2NsY62FXgGCePn88llBRxt7+PLywv4P2DXCy8NeZyVIkU58Obr1ByaRtjTpW0FDjMmRwKFi5ahN4h4u4LU71mnZfBpaGhoaAw5gqqqIy0DgiB0An6ga6RlOYMERp9MMDxyZauqOiTuOUEQ+oChyaEdWkajPke7LrWxeWkMtVxDpkvQxuYlMtrHpqbLS+Oqjc1R4bFSVTVREIT9qqrOHmlZTmU0ygSjV65TKB+N8o3G+zYaZToVbWxeGqNVrlPQxuZFMhplOgNNl5fA1ZRLC+zR0NDQ0NDQ0BgiNMNKQ0NDQ0NDQ2OIGE2G1TMjLcAgjEaZYPTKdZLRKt9olGs0ynQmo1HG0SgTjF65TjJa5RuNco1GmU5ltMr3kZdrVASva2hoaGhoaGhcC4wmj5WGhoaGhoaGxphmxA0rQRBuEQShXBCEKkEQvjXCstQJgnBUEIRDgiDsP3EsXhCE9YIgVJ743znMMvxVEIQOQRBKTjl2ThkEQXjqxL0rFwTh5uGU7WIYLfocDbo8cc0xq8/RossTsoy4PjVdDpksI67LE9fU9Dk0soy4PkedLlVVHbF/gA6oBvIAI3AYmDiC8tQBCWcc+znwrRM/fwv42TDLsAiYCZRcSAZg4ol7ZgJyT9xLnabP0aHLsazP0aTL0aJPTZfXji41fV5b+hxtuhxpj9VcoEpV1RpVVSPAy8BdIyzTmdwFPHfi5+eAu4fzYqqqbgXO7Px8LhnuAl5WVTWsqmotUEX/PR0pRrs+r6ouYUzrc7TrErSxebFouhwETZ/Dykd6bI60YZUONJ7yuunEsZFCBT4QBOGAIAhPnjiWrKpqK8CJ/5NGQK5zyTDa7t9okme06vJ8coym+zeaZIHRq09Nl5fOaNXl+eQYTfdwNMkCo1efI6bLka68LgxybCTTFBeqqtoiCEISsF4QhLIRlOViGG33bzTJM9Z0CaPr/o0mWWDs6XM03b/RJAuMPV3C6LqHo0kWGHv6HPb7N9IeqyYg85TXGUDLCMmCqqotJ/7vAN6i3z3YLghCKsCJ/ztGQLRzyTCq7h+jSJ5RrEvOI8eouX+jTJbRrE9Nl5fIKNYl55FjNN3D0STLaNbniOlypA2rfUChIAi5giAYgQeBd0ZCEEEQbIIgxJz8GbgJKDkhz6dOvO1TwKoREO9cMrwDPCgIgkkQhFygENg7AvKdZFToc5TrkvPIMZr0OSp0CaNen5ouL4FRrkvOI4emz0EY5focOV0OZ6T+RUbzrwQq6I/M/84IypFHf6bAYeDYSVkAF7ARqDzxf/wwy/ES0ApE6besnzifDMB3Tty7cuBWTZ+jR5djXZ+jQZejSZ+aLq8dXWr6vLb0Odp0qVVe19DQ0NDQ0NAYIkZ6K1BDQ0NDQ0ND45pBM6w0NDQ0NDQ0NIYIzbDS0NDQ0NDQ0BgiNMNKQ0NDQ0NDQ2OI0AwrDQ0NDQ0NDY0hQjOsNDQ0NDQ0NDSGCM2w0tDQ0NDQ0NAYIjTDSkNDQ0NDQ0NjiPj/AWmSAF7GokbLAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, { "data": { "application/javascript": [ "\n", " setTimeout(function() {\n", - " var nbb_cell_id = 12;\n", - " var nbb_unformatted_code = \"datamodule = NowcastingDataModule(df, fold=0, data_dir=PATH, batch_size=2)\\ndatamodule.setup()\\nfor batch in datamodule.train_dataloader():\\n xs, ys = batch\\n x, y = xs[0], ys[0]\\n x = x.permute(1, 2, 0).numpy()\\n y = y.permute(1, 2, 0).numpy()\\n visualize(x, y)\\n break\";\n", - " var nbb_formatted_code = \"datamodule = NowcastingDataModule(df, fold=0, data_dir=PATH, batch_size=2)\\ndatamodule.setup()\\nfor batch in datamodule.train_dataloader():\\n xs, ys = batch\\n x, y = xs[0], ys[0]\\n x = x.permute(1, 2, 0).numpy()\\n y = y.permute(1, 2, 0).numpy()\\n visualize(x, y)\\n break\";\n", + " var nbb_cell_id = 6;\n", + " var nbb_unformatted_code = \"# df = pd.read_csv(args[\\\"train_folds_csv\\\"])\\n# datamodule = NowcastingDataModule(df, fold=0, batch_size=2)\\n# datamodule.setup()\\n# for batch in datamodule.train_dataloader():\\n# xs, ys = batch\\n# idx = np.random.randint(len(xs))\\n# x, y = xs[idx], ys[idx]\\n# x = x.permute(1, 2, 0).numpy()\\n# y = y.permute(1, 2, 0).numpy()\\n# visualize(x, y)\\n# break\";\n", + " var nbb_formatted_code = \"# df = pd.read_csv(args[\\\"train_folds_csv\\\"])\\n# datamodule = NowcastingDataModule(df, fold=0, batch_size=2)\\n# datamodule.setup()\\n# for batch in datamodule.train_dataloader():\\n# xs, ys = batch\\n# idx = np.random.randint(len(xs))\\n# x, y = xs[idx], ys[idx]\\n# x = x.permute(1, 2, 0).numpy()\\n# y = y.permute(1, 2, 0).numpy()\\n# visualize(x, y)\\n# break\";\n", " var nbb_cells = Jupyter.notebook.get_cells();\n", " for (var i = 0; i < nbb_cells.length; ++i) {\n", " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", @@ -597,27 +510,36 @@ } ], "source": [ - "datamodule = NowcastingDataModule(df, fold=0, data_dir=PATH, batch_size=2)\n", - "datamodule.setup()\n", - "for batch in datamodule.train_dataloader():\n", - " xs, ys = batch\n", - " x, y = xs[0], ys[0]\n", - " x = x.permute(1, 2, 0).numpy()\n", - " y = y.permute(1, 2, 0).numpy()\n", - " visualize(x, y)\n", - " break" + "# df = pd.read_csv(args[\"train_folds_csv\"])\n", + "# datamodule = NowcastingDataModule(df, fold=0, batch_size=2)\n", + "# datamodule.setup()\n", + "# for batch in datamodule.train_dataloader():\n", + "# xs, ys = batch\n", + "# idx = np.random.randint(len(xs))\n", + "# x, y = xs[idx], ys[idx]\n", + "# x = x.permute(1, 2, 0).numpy()\n", + "# y = y.permute(1, 2, 0).numpy()\n", + "# visualize(x, y)\n", + "# break" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## RainNet" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Model" + "### Layers" ] }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 10, "metadata": {}, "outputs": [ { @@ -625,9 +547,9 @@ "application/javascript": [ "\n", " setTimeout(function() {\n", - " var nbb_cell_id = 13;\n", - " var nbb_unformatted_code = \"class Block(nn.Module):\\n def __init__(self, in_ch, out_ch):\\n super().__init__()\\n self.net = nn.Sequential(\\n nn.Conv2d(in_ch, out_ch, kernel_size=3, padding=1),\\n nn.ReLU(inplace=True),\\n nn.Conv2d(out_ch, out_ch, kernel_size=3, padding=1),\\n nn.ReLU(inplace=True),\\n )\\n\\n def forward(self, x):\\n return self.net(x)\";\n", - " var nbb_formatted_code = \"class Block(nn.Module):\\n def __init__(self, in_ch, out_ch):\\n super().__init__()\\n self.net = nn.Sequential(\\n 
nn.Conv2d(in_ch, out_ch, kernel_size=3, padding=1),\\n nn.ReLU(inplace=True),\\n nn.Conv2d(out_ch, out_ch, kernel_size=3, padding=1),\\n nn.ReLU(inplace=True),\\n )\\n\\n def forward(self, x):\\n return self.net(x)\";\n", + " var nbb_cell_id = 10;\n", + " var nbb_unformatted_code = \"class Block(nn.Module):\\n def __init__(self, in_ch, out_ch):\\n super().__init__()\\n self.net = nn.Sequential(\\n nn.Conv2d(in_ch, out_ch, kernel_size=3, padding=1, bias=False),\\n nn.ReLU(inplace=True),\\n nn.BatchNorm2d(out_ch),\\n nn.Conv2d(out_ch, out_ch, kernel_size=3, padding=1, bias=False),\\n nn.ReLU(inplace=True),\\n nn.BatchNorm2d(out_ch),\\n )\\n\\n def forward(self, x):\\n return self.net(x)\\n\\n\\nclass Encoder(nn.Module):\\n def __init__(self, chs=[4, 64, 128, 256, 512, 1024], drop_rate=0.5):\\n super().__init__()\\n self.blocks = nn.ModuleList(\\n [Block(chs[i], chs[i + 1]) for i in range(len(chs) - 1)]\\n )\\n self.pool = nn.MaxPool2d(kernel_size=2, stride=2)\\n self.dropout = nn.Dropout(p=drop_rate)\\n\\n def forward(self, x):\\n ftrs = []\\n for i, block in enumerate(self.blocks):\\n x = block(x)\\n ftrs.append(x)\\n if i >= 3:\\n x = self.dropout(x)\\n if i < 4:\\n x = self.pool(x)\\n return ftrs\\n\\n\\nclass Decoder(nn.Module):\\n def __init__(self, chs=[1024, 512, 256, 128, 64]):\\n super().__init__()\\n self.chs = chs\\n# self.ups = nn.ModuleList(\\n# [nn.Upsample(scale_factor=2, mode=\\\"nearest\\\") for i in range(len(chs) - 1)]\\n# )\\n self.ups = nn.ModuleList(\\n [nn.ConvTranspose2d(chs[i], chs[i+1], kernel_size=2, stride=2) for i in range(len(chs) - 1)]\\n )\\n self.convs = nn.ModuleList(\\n [Block(chs[i] + chs[i + 1], chs[i + 1]) for i in range(len(chs) - 1)]\\n )\\n\\n def forward(self, x, ftrs):\\n for i in range(len(self.chs) - 1):\\n x = self.ups[i](x)\\n x = torch.cat([ftrs[i], x], dim=1)\\n x = self.convs[i](x)\\n return x\";\n", + " var nbb_formatted_code = \"class Block(nn.Module):\\n def __init__(self, in_ch, out_ch):\\n super().__init__()\\n self.net = nn.Sequential(\\n nn.Conv2d(in_ch, out_ch, kernel_size=3, padding=1, bias=False),\\n nn.ReLU(inplace=True),\\n nn.BatchNorm2d(out_ch),\\n nn.Conv2d(out_ch, out_ch, kernel_size=3, padding=1, bias=False),\\n nn.ReLU(inplace=True),\\n nn.BatchNorm2d(out_ch),\\n )\\n\\n def forward(self, x):\\n return self.net(x)\\n\\n\\nclass Encoder(nn.Module):\\n def __init__(self, chs=[4, 64, 128, 256, 512, 1024], drop_rate=0.5):\\n super().__init__()\\n self.blocks = nn.ModuleList(\\n [Block(chs[i], chs[i + 1]) for i in range(len(chs) - 1)]\\n )\\n self.pool = nn.MaxPool2d(kernel_size=2, stride=2)\\n self.dropout = nn.Dropout(p=drop_rate)\\n\\n def forward(self, x):\\n ftrs = []\\n for i, block in enumerate(self.blocks):\\n x = block(x)\\n ftrs.append(x)\\n if i >= 3:\\n x = self.dropout(x)\\n if i < 4:\\n x = self.pool(x)\\n return ftrs\\n\\n\\nclass Decoder(nn.Module):\\n def __init__(self, chs=[1024, 512, 256, 128, 64]):\\n super().__init__()\\n self.chs = chs\\n # self.ups = nn.ModuleList(\\n # [nn.Upsample(scale_factor=2, mode=\\\"nearest\\\") for i in range(len(chs) - 1)]\\n # )\\n self.ups = nn.ModuleList(\\n [\\n nn.ConvTranspose2d(chs[i], chs[i + 1], kernel_size=2, stride=2)\\n for i in range(len(chs) - 1)\\n ]\\n )\\n self.convs = nn.ModuleList(\\n [Block(chs[i] + chs[i + 1], chs[i + 1]) for i in range(len(chs) - 1)]\\n )\\n\\n def forward(self, x, ftrs):\\n for i in range(len(self.chs) - 1):\\n x = self.ups[i](x)\\n x = torch.cat([ftrs[i], x], dim=1)\\n x = self.convs[i](x)\\n return x\";\n", " var nbb_cells = 
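The reformatted layer cell above redefines the double-convolution Block (3x3 convs with padding=1 and bias=False, ReLU applied before BatchNorm2d) and an Encoder that pools after the first four blocks while applying dropout only to the two deepest levels. Below is a minimal shape check, a sketch that is not part of the notebook, for a 128x128 input with 4 stacked radar frames; dropout is omitted because it does not change shapes.

```python
# Sketch only: verify the feature-map sizes produced by the Block/Encoder layout above.
import torch
import torch.nn as nn


class Block(nn.Module):
    # Same layout as the cell above: 3x3 convs, padding=1, bias=False,
    # each followed by ReLU and then BatchNorm2d.
    def __init__(self, in_ch, out_ch):
        super().__init__()
        self.net = nn.Sequential(
            nn.Conv2d(in_ch, out_ch, kernel_size=3, padding=1, bias=False),
            nn.ReLU(inplace=True),
            nn.BatchNorm2d(out_ch),
            nn.Conv2d(out_ch, out_ch, kernel_size=3, padding=1, bias=False),
            nn.ReLU(inplace=True),
            nn.BatchNorm2d(out_ch),
        )

    def forward(self, x):
        return self.net(x)


chs = [4, 64, 128, 256, 512, 1024]           # encoder channel widths from the cell above
pool = nn.MaxPool2d(kernel_size=2, stride=2)
x = torch.randn(2, 4, 128, 128)              # (batch, input frames, H, W)
for i in range(len(chs) - 1):
    x = Block(chs[i], chs[i + 1])(x)
    print(i, tuple(x.shape))                 # (2,64,128,128) (2,128,64,64) ... (2,1024,8,8)
    if i < 4:                                # as in Encoder.forward: no pooling after the last block
        x = pool(x)
```

The pre-pool outputs are the skip connections the decoder later consumes, so the U-shaped path works on 128, 64, 32, 16 and 8 pixel grids.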
Jupyter.notebook.get_cells();\n", " for (var i = 0; i < nbb_cells.length; ++i) {\n", " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", @@ -653,50 +575,18 @@ " def __init__(self, in_ch, out_ch):\n", " super().__init__()\n", " self.net = nn.Sequential(\n", - " nn.Conv2d(in_ch, out_ch, kernel_size=3, padding=1),\n", + " nn.Conv2d(in_ch, out_ch, kernel_size=3, padding=1, bias=False),\n", " nn.ReLU(inplace=True),\n", - " nn.Conv2d(out_ch, out_ch, kernel_size=3, padding=1),\n", + " nn.BatchNorm2d(out_ch),\n", + " nn.Conv2d(out_ch, out_ch, kernel_size=3, padding=1, bias=False),\n", " nn.ReLU(inplace=True),\n", + " nn.BatchNorm2d(out_ch),\n", " )\n", "\n", " def forward(self, x):\n", - " return self.net(x)" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "data": { - "application/javascript": [ - "\n", - " setTimeout(function() {\n", - " var nbb_cell_id = 17;\n", - " var nbb_unformatted_code = \"class Encoder(nn.Module):\\n def __init__(self, chs=[4, 64, 128, 256, 512, 1024], drop_rate=0.5):\\n super().__init__()\\n self.blocks = nn.ModuleList(\\n [Block(chs[i], chs[i + 1]) for i in range(len(chs) - 1)]\\n )\\n self.pool = nn.MaxPool2d(kernel_size=2, stride=2)\\n self.dropout = nn.Dropout(p=drop_rate)\\n\\n def forward(self, x):\\n ftrs = []\\n for i, block in enumerate(self.blocks):\\n x = block(x)\\n ftrs.append(x)\\n if i >= 3:\\n x = self.dropout(x)\\n if i < 4:\\n x = self.pool(x)\\n return ftrs\";\n", - " var nbb_formatted_code = \"class Encoder(nn.Module):\\n def __init__(self, chs=[4, 64, 128, 256, 512, 1024], drop_rate=0.5):\\n super().__init__()\\n self.blocks = nn.ModuleList(\\n [Block(chs[i], chs[i + 1]) for i in range(len(chs) - 1)]\\n )\\n self.pool = nn.MaxPool2d(kernel_size=2, stride=2)\\n self.dropout = nn.Dropout(p=drop_rate)\\n\\n def forward(self, x):\\n ftrs = []\\n for i, block in enumerate(self.blocks):\\n x = block(x)\\n ftrs.append(x)\\n if i >= 3:\\n x = self.dropout(x)\\n if i < 4:\\n x = self.pool(x)\\n return ftrs\";\n", - " var nbb_cells = Jupyter.notebook.get_cells();\n", - " for (var i = 0; i < nbb_cells.length; ++i) {\n", - " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", - " if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n", - " nbb_cells[i].set_text(nbb_formatted_code);\n", - " }\n", - " break;\n", - " }\n", - " }\n", - " }, 500);\n", - " " - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ + " return self.net(x)\n", + "\n", + "\n", "class Encoder(nn.Module):\n", " def __init__(self, chs=[4, 64, 128, 256, 512, 1024], drop_rate=0.5):\n", " super().__init__()\n", @@ -715,52 +605,24 @@ " x = self.dropout(x)\n", " if i < 4:\n", " x = self.pool(x)\n", - " return ftrs" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "data": { - "application/javascript": [ - "\n", - " setTimeout(function() {\n", - " var nbb_cell_id = 16;\n", - " var nbb_unformatted_code = \"class Decoder(nn.Module):\\n def __init__(self, chs=[1024, 512, 256, 128, 64], bn=True):\\n super().__init__()\\n self.chs = chs\\n self.ups = nn.ModuleList(\\n [\\n nn.Upsample(scale_factor=2, mode=\\\"nearest\\\")\\n for i in range(len(chs) - 1)\\n ]\\n )\\n self.convs = nn.ModuleList(\\n [Block(chs[i] + chs[i + 1], chs[i + 1], bn=bn) for i in range(len(chs) - 1)]\\n )\\n\\n def forward(self, x, ftrs):\\n for i in range(len(self.chs) - 1):\\n x = self.ups[i](x)\\n x = torch.cat([ftrs[i], x], dim=1)\\n x = self.convs[i](x)\\n 
return x\";\n", - " var nbb_formatted_code = \"class Decoder(nn.Module):\\n def __init__(self, chs=[1024, 512, 256, 128, 64], bn=True):\\n super().__init__()\\n self.chs = chs\\n self.ups = nn.ModuleList(\\n [nn.Upsample(scale_factor=2, mode=\\\"nearest\\\") for i in range(len(chs) - 1)]\\n )\\n self.convs = nn.ModuleList(\\n [Block(chs[i] + chs[i + 1], chs[i + 1], bn=bn) for i in range(len(chs) - 1)]\\n )\\n\\n def forward(self, x, ftrs):\\n for i in range(len(self.chs) - 1):\\n x = self.ups[i](x)\\n x = torch.cat([ftrs[i], x], dim=1)\\n x = self.convs[i](x)\\n return x\";\n", - " var nbb_cells = Jupyter.notebook.get_cells();\n", - " for (var i = 0; i < nbb_cells.length; ++i) {\n", - " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", - " if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n", - " nbb_cells[i].set_text(nbb_formatted_code);\n", - " }\n", - " break;\n", - " }\n", - " }\n", - " }, 500);\n", - " " - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ + " return ftrs\n", + "\n", + "\n", "class Decoder(nn.Module):\n", - " def __init__(self, chs=[1024, 512, 256, 128, 64], bn=True):\n", + " def __init__(self, chs=[1024, 512, 256, 128, 64]):\n", " super().__init__()\n", " self.chs = chs\n", + " # self.ups = nn.ModuleList(\n", + " # [nn.Upsample(scale_factor=2, mode=\"nearest\") for i in range(len(chs) - 1)]\n", + " # )\n", " self.ups = nn.ModuleList(\n", - " [nn.Upsample(scale_factor=2, mode=\"nearest\") for i in range(len(chs) - 1)]\n", + " [\n", + " nn.ConvTranspose2d(chs[i], chs[i + 1], kernel_size=2, stride=2)\n", + " for i in range(len(chs) - 1)\n", + " ]\n", " )\n", " self.convs = nn.ModuleList(\n", - " [Block(chs[i] + chs[i + 1], chs[i + 1], bn=bn) for i in range(len(chs) - 1)]\n", + " [Block(chs[i] + chs[i + 1], chs[i + 1]) for i in range(len(chs) - 1)]\n", " )\n", "\n", " def forward(self, x, ftrs):\n", @@ -771,60 +633,6 @@ " return x" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "class RainNet(pl.LightningModule):\n", - " def __init__(self, lr=1e-4, enc_chs=[4,64,128,256,512,1024], dec_chs=[1024,512,256,128,64]):\n", - " super().__init__()\n", - " self.lr = lr\n", - " \n", - " self.criterion = LogCoshLoss()\n", - " \n", - " self.encoder = Encoder(enc_chs)\n", - " self.decoder = Decoder(dec_chs)\n", - " self.out = nn.Sequential(\n", - " nn.Conv2d(64, 2, kernel_size=3, padding=1),\n", - " nn.ReLU(inplace=True),\n", - " nn.Conv2d(2, 1, kernel_size=1),\n", - " nn.ReLU(inplace=True),\n", - " )\n", - " \n", - " def forward(self, x):\n", - " ftrs = self.encoder(x)\n", - " ftrs = ftrs[::-1]\n", - " x = self.decoder(ftrs[0], ftrs[1:])\n", - " out = self.out(x)\n", - " return out\n", - " \n", - " def shared_step(self, batch, batch_idx):\n", - " x, y = batch\n", - " y_hat = self(x)\n", - " loss = self.criterion(y_hat, y)\n", - " return loss, y, y_hat\n", - " \n", - " def training_step(self, batch, batch_idx):\n", - " loss, y, y_hat = self.shared_step(batch, batch_idx)\n", - " self.log(\"train_loss\", loss)\n", - " return {\"loss\": loss}\n", - " \n", - " def training_epoch_end(self, outputs):\n", - " pass\n", - " \n", - " def validation_step(self, batch, batch_idx):\n", - " pass\n", - " \n", - " def validation_epoch_end(self, outputs):\n", - " pass\n", - " \n", - " def configure_optimizers(self):\n", - " optimizer = torch.optim.Adam(self.parameters(), lr=self.lr)\n", - " return optimizer" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -834,7 
+642,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 11, "metadata": {}, "outputs": [ { @@ -842,9 +650,9 @@ "application/javascript": [ "\n", " setTimeout(function() {\n", - " var nbb_cell_id = 13;\n", - " var nbb_unformatted_code = \"class RainNet(pl.LightningModule):\\n def __init__(\\n self,\\n lr=1e-4,\\n bn=True,\\n enc_chs=[4, 64, 128, 256, 512, 1024],\\n dec_chs=[1024, 512, 256, 128, 64],\\n ):\\n super().__init__()\\n self.save_hyperparameters()\\n\\n # criterion and metrics\\n self.criterion = LogCoshLoss()\\n self.train_mae = pl.metrics.MeanAbsoluteError()\\n self.val_mae = pl.metrics.MeanAbsoluteError()\\n\\n # layers\\n self.encoder = Encoder(enc_chs, bn=bn)\\n self.decoder = Decoder(dec_chs, bn=bn)\\n if bn:\\n self.out = nn.Sequential(\\n nn.Conv2d(64, 2, kernel_size=3, bias=False, padding=1),\\n nn.BatchNorm2d(2),\\n nn.ReLU(inplace=True),\\n nn.Conv2d(2, 1, kernel_size=1, bias=False),\\n nn.BatchNorm2d(1),\\n nn.ReLU(inplace=True),\\n )\\n else:\\n self.out = nn.Sequential(\\n nn.Conv2d(64, 2, kernel_size=3, bias=False, padding=1),\\n nn.ReLU(inplace=True),\\n nn.Conv2d(2, 1, kernel_size=1, bias=False),\\n nn.ReLU(inplace=True),\\n )\\n\\n def forward(self, x):\\n ftrs = self.encoder(x)\\n ftrs = list(reversed(ftrs))\\n x = self.decoder(ftrs[0], ftrs[1:])\\n out = self.out(x)\\n return out\\n\\n def _shared_step(self, batch, batch_idx):\\n x, y = batch\\n y_hat = self(x)\\n loss = self.criterion(y_hat, y)\\n return loss, y, y_hat\\n\\n def training_step(self, batch, batch_idx):\\n loss, y, y_hat = self._shared_step(batch, batch_idx)\\n self.log(\\\"train_loss\\\", loss)\\n self.log(\\\"train_mae\\\", self.train_mae(y_hat, y))\\n return {\\\"loss\\\": loss}\\n\\n def training_epoch_end(self, outputs):\\n self.log(\\\"train_mae\\\", self.train_mae.compute())\\n self.train_mae.reset()\\n\\n def validation_step(self, batch, batch_idx):\\n loss, y, y_hat = self._shared_step(batch, batch_idx)\\n self.log(\\\"val_loss\\\", loss)\\n self.log(\\\"val_mae\\\", self.val_mae(y_hat, y))\\n return {\\\"loss\\\": loss, \\\"y\\\": y.detach(), \\\"y_hat\\\": y_hat.detach()}\\n\\n def validation_epoch_end(self, outputs):\\n avg_loss = torch.stack([x[\\\"loss\\\"] for x in outputs]).mean()\\n self.log(\\\"loss\\\", avg_loss)\\n # y = torch.cat([x[\\\"y\\\"] for x in outputs])\\n # y_hat = torch.cat([x[\\\"y_hat\\\"] for x in outputs])\\n # mae = self.mae(y_hat, y)\\n self.log(\\\"val_mae\\\", self.val_mae.compute())\\n print(f\\\"Epoch {self.current_epoch} | MAE: {self.val_mae.compute()}\\\")\\n self.val_mae.reset()\\n\\n def configure_optimizers(self):\\n optimizer = torch.optim.Adam(self.parameters(), lr=self.hparams.lr)\\n return optimizer\";\n", - " var nbb_formatted_code = \"class RainNet(pl.LightningModule):\\n def __init__(\\n self,\\n lr=1e-4,\\n bn=True,\\n enc_chs=[4, 64, 128, 256, 512, 1024],\\n dec_chs=[1024, 512, 256, 128, 64],\\n ):\\n super().__init__()\\n self.save_hyperparameters()\\n\\n # criterion and metrics\\n self.criterion = LogCoshLoss()\\n self.train_mae = pl.metrics.MeanAbsoluteError()\\n self.val_mae = pl.metrics.MeanAbsoluteError()\\n\\n # layers\\n self.encoder = Encoder(enc_chs, bn=bn)\\n self.decoder = Decoder(dec_chs, bn=bn)\\n if bn:\\n self.out = nn.Sequential(\\n nn.Conv2d(64, 2, kernel_size=3, bias=False, padding=1),\\n nn.BatchNorm2d(2),\\n nn.ReLU(inplace=True),\\n nn.Conv2d(2, 1, kernel_size=1, bias=False),\\n nn.BatchNorm2d(1),\\n nn.ReLU(inplace=True),\\n )\\n else:\\n self.out = nn.Sequential(\\n nn.Conv2d(64, 2, 
kernel_size=3, bias=False, padding=1),\\n nn.ReLU(inplace=True),\\n nn.Conv2d(2, 1, kernel_size=1, bias=False),\\n nn.ReLU(inplace=True),\\n )\\n\\n def forward(self, x):\\n ftrs = self.encoder(x)\\n ftrs = list(reversed(ftrs))\\n x = self.decoder(ftrs[0], ftrs[1:])\\n out = self.out(x)\\n return out\\n\\n def _shared_step(self, batch, batch_idx):\\n x, y = batch\\n y_hat = self(x)\\n loss = self.criterion(y_hat, y)\\n return loss, y, y_hat\\n\\n def training_step(self, batch, batch_idx):\\n loss, y, y_hat = self._shared_step(batch, batch_idx)\\n self.log(\\\"train_loss\\\", loss)\\n self.log(\\\"train_mae\\\", self.train_mae(y_hat, y))\\n return {\\\"loss\\\": loss}\\n\\n def training_epoch_end(self, outputs):\\n self.log(\\\"train_mae\\\", self.train_mae.compute())\\n self.train_mae.reset()\\n\\n def validation_step(self, batch, batch_idx):\\n loss, y, y_hat = self._shared_step(batch, batch_idx)\\n self.log(\\\"val_loss\\\", loss)\\n self.log(\\\"val_mae\\\", self.val_mae(y_hat, y))\\n return {\\\"loss\\\": loss, \\\"y\\\": y.detach(), \\\"y_hat\\\": y_hat.detach()}\\n\\n def validation_epoch_end(self, outputs):\\n avg_loss = torch.stack([x[\\\"loss\\\"] for x in outputs]).mean()\\n self.log(\\\"loss\\\", avg_loss)\\n # y = torch.cat([x[\\\"y\\\"] for x in outputs])\\n # y_hat = torch.cat([x[\\\"y_hat\\\"] for x in outputs])\\n # mae = self.mae(y_hat, y)\\n self.log(\\\"val_mae\\\", self.val_mae.compute())\\n print(f\\\"Epoch {self.current_epoch} | MAE: {self.val_mae.compute()}\\\")\\n self.val_mae.reset()\\n\\n def configure_optimizers(self):\\n optimizer = torch.optim.Adam(self.parameters(), lr=self.hparams.lr)\\n return optimizer\";\n", + " var nbb_cell_id = 11;\n", + " var nbb_unformatted_code = \"class RainNet(pl.LightningModule):\\n def __init__(\\n self,\\n lr=1e-4,\\n enc_chs=[4, 64, 128, 256, 512, 1024],\\n dec_chs=[1024, 512, 256, 128, 64],\\n num_train_steps=None,\\n ):\\n super().__init__()\\n\\n # Parameters\\n self.lr = lr\\n self.num_train_steps = num_train_steps\\n\\n # self.criterion = LogCoshLoss()\\n self.criterion = nn.L1Loss()\\n\\n # Layers\\n self.encoder = Encoder(enc_chs)\\n self.decoder = Decoder(dec_chs)\\n self.out = nn.Sequential(\\n nn.Conv2d(64, 2, kernel_size=3, padding=1),\\n nn.ReLU(inplace=True),\\n nn.BatchNorm2d(2),\\n nn.Conv2d(2, 1, kernel_size=1),\\n nn.ReLU(inplace=True),\\n )\\n\\n def forward(self, x):\\n ftrs = self.encoder(x)\\n ftrs = ftrs[::-1]\\n x = self.decoder(ftrs[0], ftrs[1:])\\n out = self.out(x)\\n return out\\n\\n def shared_step(self, batch, batch_idx):\\n x, y = batch\\n y_hat = self(x)\\n loss = self.criterion(y_hat, y)\\n return loss, y, y_hat\\n\\n def training_step(self, batch, batch_idx):\\n loss, y, y_hat = self.shared_step(batch, batch_idx)\\n self.log(\\\"train_loss\\\", loss)\\n return {\\\"loss\\\": loss}\\n\\n def validation_step(self, batch, batch_idx):\\n loss, y, y_hat = self.shared_step(batch, batch_idx)\\n return {\\\"loss\\\": loss, \\\"y\\\": y.detach(), \\\"y_hat\\\": y_hat.detach()}\\n\\n def validation_epoch_end(self, outputs):\\n avg_loss = torch.stack([x[\\\"loss\\\"] for x in outputs]).mean()\\n self.log(\\\"val_loss\\\", avg_loss)\\n\\n tfms = nn.Sequential(\\n T.CenterCrop(120),\\n )\\n\\n y = torch.cat([x[\\\"y\\\"] for x in outputs])\\n y = tfms(y)\\n y = y.detach().cpu().numpy()\\n y = y.reshape(-1, 120 * 120)\\n\\n y_hat = torch.cat([x[\\\"y_hat\\\"] for x in outputs])\\n y_hat = tfms(y_hat)\\n y_hat = y_hat.detach().cpu().numpy()\\n y_hat = y_hat.reshape(-1, 120 * 120)\\n\\n y = 255.0 * y[:, 
args[\\\"dams\\\"]]\\n y = np.round(y).clip(0, 255)\\n y_hat = 255.0 * y_hat[:, args[\\\"dams\\\"]]\\n y_hat = np.round(y_hat).clip(0, 255)\\n # mae = metrics.mean_absolute_error(y, y_hat)\\n\\n y_true = radar2precipitation(y)\\n y_true = np.where(y_true >= 0.1, 1, 0)\\n y_pred = radar2precipitation(y_hat)\\n y_pred = np.where(y_pred >= 0.1, 1, 0)\\n\\n y = y * y_true\\n y_hat = y_hat * y_true\\n# mae = np.abs(y - y_hat).sum() / y_true.sum()\\n mae = np.abs(y - y_hat).mean()\\n\\n tn, fp, fn, tp = metrics.confusion_matrix(\\n y_true.reshape(-1), y_pred.reshape(-1)\\n ).ravel()\\n csi = tp / (tp + fn + fp)\\n\\n comp_metric = mae / (csi + 1e-12)\\n\\n print(\\n f\\\"Epoch {self.current_epoch} | MAE/CSI: {comp_metric} | MAE: {mae} | CSI: {csi} | Loss: {avg_loss}\\\"\\n )\\n\\n def configure_optimizers(self):\\n optimizer = torch.optim.Adam(self.parameters(), lr=self.lr)\\n scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(\\n optimizer, T_max=self.num_train_steps\\n )\\n return [optimizer], [{\\\"scheduler\\\": scheduler, \\\"interval\\\": \\\"step\\\"}]\";\n", + " var nbb_formatted_code = \"class RainNet(pl.LightningModule):\\n def __init__(\\n self,\\n lr=1e-4,\\n enc_chs=[4, 64, 128, 256, 512, 1024],\\n dec_chs=[1024, 512, 256, 128, 64],\\n num_train_steps=None,\\n ):\\n super().__init__()\\n\\n # Parameters\\n self.lr = lr\\n self.num_train_steps = num_train_steps\\n\\n # self.criterion = LogCoshLoss()\\n self.criterion = nn.L1Loss()\\n\\n # Layers\\n self.encoder = Encoder(enc_chs)\\n self.decoder = Decoder(dec_chs)\\n self.out = nn.Sequential(\\n nn.Conv2d(64, 2, kernel_size=3, padding=1),\\n nn.ReLU(inplace=True),\\n nn.BatchNorm2d(2),\\n nn.Conv2d(2, 1, kernel_size=1),\\n nn.ReLU(inplace=True),\\n )\\n\\n def forward(self, x):\\n ftrs = self.encoder(x)\\n ftrs = ftrs[::-1]\\n x = self.decoder(ftrs[0], ftrs[1:])\\n out = self.out(x)\\n return out\\n\\n def shared_step(self, batch, batch_idx):\\n x, y = batch\\n y_hat = self(x)\\n loss = self.criterion(y_hat, y)\\n return loss, y, y_hat\\n\\n def training_step(self, batch, batch_idx):\\n loss, y, y_hat = self.shared_step(batch, batch_idx)\\n self.log(\\\"train_loss\\\", loss)\\n return {\\\"loss\\\": loss}\\n\\n def validation_step(self, batch, batch_idx):\\n loss, y, y_hat = self.shared_step(batch, batch_idx)\\n return {\\\"loss\\\": loss, \\\"y\\\": y.detach(), \\\"y_hat\\\": y_hat.detach()}\\n\\n def validation_epoch_end(self, outputs):\\n avg_loss = torch.stack([x[\\\"loss\\\"] for x in outputs]).mean()\\n self.log(\\\"val_loss\\\", avg_loss)\\n\\n tfms = nn.Sequential(\\n T.CenterCrop(120),\\n )\\n\\n y = torch.cat([x[\\\"y\\\"] for x in outputs])\\n y = tfms(y)\\n y = y.detach().cpu().numpy()\\n y = y.reshape(-1, 120 * 120)\\n\\n y_hat = torch.cat([x[\\\"y_hat\\\"] for x in outputs])\\n y_hat = tfms(y_hat)\\n y_hat = y_hat.detach().cpu().numpy()\\n y_hat = y_hat.reshape(-1, 120 * 120)\\n\\n y = 255.0 * y[:, args[\\\"dams\\\"]]\\n y = np.round(y).clip(0, 255)\\n y_hat = 255.0 * y_hat[:, args[\\\"dams\\\"]]\\n y_hat = np.round(y_hat).clip(0, 255)\\n # mae = metrics.mean_absolute_error(y, y_hat)\\n\\n y_true = radar2precipitation(y)\\n y_true = np.where(y_true >= 0.1, 1, 0)\\n y_pred = radar2precipitation(y_hat)\\n y_pred = np.where(y_pred >= 0.1, 1, 0)\\n\\n y = y * y_true\\n y_hat = y_hat * y_true\\n # mae = np.abs(y - y_hat).sum() / y_true.sum()\\n mae = np.abs(y - y_hat).mean()\\n\\n tn, fp, fn, tp = metrics.confusion_matrix(\\n y_true.reshape(-1), y_pred.reshape(-1)\\n ).ravel()\\n csi = tp / (tp + fn + fp)\\n\\n 
comp_metric = mae / (csi + 1e-12)\\n\\n print(\\n f\\\"Epoch {self.current_epoch} | MAE/CSI: {comp_metric} | MAE: {mae} | CSI: {csi} | Loss: {avg_loss}\\\"\\n )\\n\\n def configure_optimizers(self):\\n optimizer = torch.optim.Adam(self.parameters(), lr=self.lr)\\n scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(\\n optimizer, T_max=self.num_train_steps\\n )\\n return [optimizer], [{\\\"scheduler\\\": scheduler, \\\"interval\\\": \\\"step\\\"}]\";\n", " var nbb_cells = Jupyter.notebook.get_cells();\n", " for (var i = 0; i < nbb_cells.length; ++i) {\n", " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", @@ -870,1379 +678,191 @@ " def __init__(\n", " self,\n", " lr=1e-4,\n", - " bn=True,\n", " enc_chs=[4, 64, 128, 256, 512, 1024],\n", " dec_chs=[1024, 512, 256, 128, 64],\n", + " num_train_steps=None,\n", " ):\n", " super().__init__()\n", - " self.save_hyperparameters()\n", "\n", - " # criterion and metrics\n", - " self.criterion = LogCoshLoss()\n", - " self.train_mae = pl.metrics.MeanAbsoluteError()\n", - " self.val_mae = pl.metrics.MeanAbsoluteError()\n", + " # Parameters\n", + " self.lr = lr\n", + " self.num_train_steps = num_train_steps\n", + "\n", + " # self.criterion = LogCoshLoss()\n", + " self.criterion = nn.L1Loss()\n", "\n", - " # layers\n", - " self.encoder = Encoder(enc_chs, bn=bn)\n", - " self.decoder = Decoder(dec_chs, bn=bn)\n", - " if bn:\n", - " self.out = nn.Sequential(\n", - " nn.Conv2d(64, 2, kernel_size=3, bias=False, padding=1),\n", - " nn.BatchNorm2d(2),\n", - " nn.ReLU(inplace=True),\n", - " nn.Conv2d(2, 1, kernel_size=1, bias=False),\n", - " nn.BatchNorm2d(1),\n", - " nn.ReLU(inplace=True),\n", - " )\n", - " else:\n", - " self.out = nn.Sequential(\n", - " nn.Conv2d(64, 2, kernel_size=3, bias=False, padding=1),\n", - " nn.ReLU(inplace=True),\n", - " nn.Conv2d(2, 1, kernel_size=1, bias=False),\n", - " nn.ReLU(inplace=True),\n", - " )\n", + " # Layers\n", + " self.encoder = Encoder(enc_chs)\n", + " self.decoder = Decoder(dec_chs)\n", + " self.out = nn.Sequential(\n", + " nn.Conv2d(64, 2, kernel_size=3, padding=1),\n", + " nn.ReLU(inplace=True),\n", + " nn.BatchNorm2d(2),\n", + " nn.Conv2d(2, 1, kernel_size=1),\n", + " nn.ReLU(inplace=True),\n", + " )\n", "\n", " def forward(self, x):\n", " ftrs = self.encoder(x)\n", - " ftrs = list(reversed(ftrs))\n", + " ftrs = ftrs[::-1]\n", " x = self.decoder(ftrs[0], ftrs[1:])\n", " out = self.out(x)\n", " return out\n", "\n", - " def _shared_step(self, batch, batch_idx):\n", + " def shared_step(self, batch, batch_idx):\n", " x, y = batch\n", " y_hat = self(x)\n", " loss = self.criterion(y_hat, y)\n", " return loss, y, y_hat\n", "\n", " def training_step(self, batch, batch_idx):\n", - " loss, y, y_hat = self._shared_step(batch, batch_idx)\n", + " loss, y, y_hat = self.shared_step(batch, batch_idx)\n", " self.log(\"train_loss\", loss)\n", - " self.log(\"train_mae\", self.train_mae(y_hat, y))\n", " return {\"loss\": loss}\n", "\n", - " def training_epoch_end(self, outputs):\n", - " self.log(\"train_mae\", self.train_mae.compute())\n", - " self.train_mae.reset()\n", - "\n", " def validation_step(self, batch, batch_idx):\n", - " loss, y, y_hat = self._shared_step(batch, batch_idx)\n", - " self.log(\"val_loss\", loss)\n", - " self.log(\"val_mae\", self.val_mae(y_hat, y))\n", - " return {\"loss\": loss, \"y\": y.detach(), \"y_hat\": y_hat.detach()}\n", - "\n", - " def validation_epoch_end(self, outputs):\n", - " avg_loss = torch.stack([x[\"loss\"] for x in outputs]).mean()\n", - " self.log(\"loss\", avg_loss)\n", - " # y = 
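The new validation_epoch_end above center-crops targets and predictions to 120x120, rescales them to the 0-255 radar range, keeps only the dam pixel indices, converts to precipitation, thresholds at 0.1 mm to get rain/no-rain masks, masks both arrays with the observed-rain indicator before taking the mean absolute error, and finally reports MAE divided by the critical success index. The toy example below sketches just the metric arithmetic; radar2precipitation lives in the project's utils and is not reproduced here, so the sketch starts from already-thresholded masks.

```python
# Sketch only: the MAE / CSI composite metric on toy arrays.
import numpy as np
from sklearn import metrics

y_true_mask = np.array([1, 1, 0, 1, 0, 0, 1, 0])                    # observed rain (>= 0.1 mm)
y_pred_mask = np.array([1, 0, 0, 1, 1, 0, 1, 0])                    # predicted rain
y = np.array([120.0, 80.0, 0.0, 200.0, 0.0, 0.0, 60.0, 0.0])        # observed radar values (0-255)
y_hat = np.array([110.0, 70.0, 5.0, 190.0, 30.0, 0.0, 55.0, 2.0])   # predicted radar values

# As in the notebook, error only accumulates where rain was actually observed.
mae = np.abs(y * y_true_mask - y_hat * y_true_mask).mean()

tn, fp, fn, tp = metrics.confusion_matrix(y_true_mask, y_pred_mask).ravel()
csi = tp / (tp + fn + fp)                  # critical success index
comp_metric = mae / (csi + 1e-12)          # lower is better
print(f"MAE: {mae:.3f} | CSI: {csi:.3f} | MAE/CSI: {comp_metric:.3f}")
```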
torch.cat([x[\"y\"] for x in outputs])\n", - " # y_hat = torch.cat([x[\"y_hat\"] for x in outputs])\n", - " # mae = self.mae(y_hat, y)\n", - " self.log(\"val_mae\", self.val_mae.compute())\n", - " print(f\"Epoch {self.current_epoch} | MAE: {self.val_mae.compute()}\")\n", - " self.val_mae.reset()\n", - "\n", - " def configure_optimizers(self):\n", - " optimizer = torch.optim.Adam(self.parameters(), lr=self.hparams.lr)\n", - " return optimizer" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Train" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "data": { - "application/javascript": [ - "\n", - " setTimeout(function() {\n", - " var nbb_cell_id = 14;\n", - " var nbb_unformatted_code = \"model = RainNet()\";\n", - " var nbb_formatted_code = \"model = RainNet()\";\n", - " var nbb_cells = Jupyter.notebook.get_cells();\n", - " for (var i = 0; i < nbb_cells.length; ++i) {\n", - " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", - " if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n", - " nbb_cells[i].set_text(nbb_formatted_code);\n", - " }\n", - " break;\n", - " }\n", - " }\n", - " }, 500);\n", - " " - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "model = RainNet()" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "data": { - "application/javascript": [ - "\n", - " setTimeout(function() {\n", - " var nbb_cell_id = 15;\n", - " var nbb_unformatted_code = \"datamodule = NowcastingDataModule(df, fold=0, batch_size=64)\\ndatamodule.setup()\";\n", - " var nbb_formatted_code = \"datamodule = NowcastingDataModule(df, fold=0, batch_size=64)\\ndatamodule.setup()\";\n", - " var nbb_cells = Jupyter.notebook.get_cells();\n", - " for (var i = 0; i < nbb_cells.length; ++i) {\n", - " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", - " if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n", - " nbb_cells[i].set_text(nbb_formatted_code);\n", - " }\n", - " break;\n", - " }\n", - " }\n", - " }, 500);\n", - " " - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "datamodule = NowcastingDataModule(df, fold=0, batch_size=64)\n", - "datamodule.setup()" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "GPU available: True, used: True\n", - "TPU available: False, using: 0 TPU cores\n", - "LOCAL_RANK: 0 - CUDA_VISIBLE_DEVICES: [0]\n", - "Using native 16bit precision.\n" - ] - }, - { - "data": { - "application/javascript": [ - "\n", - " setTimeout(function() {\n", - " var nbb_cell_id = 16;\n", - " var nbb_unformatted_code = \"trainer = pl.Trainer(\\n gpus=1,\\n max_epochs=40,\\n precision=16,\\n progress_bar_refresh_rate=50,\\n # fast_dev_run=True,\\n benchmark=True,\\n)\";\n", - " var nbb_formatted_code = \"trainer = pl.Trainer(\\n gpus=1,\\n max_epochs=40,\\n precision=16,\\n progress_bar_refresh_rate=50,\\n # fast_dev_run=True,\\n benchmark=True,\\n)\";\n", - " var nbb_cells = Jupyter.notebook.get_cells();\n", - " for (var i = 0; i < nbb_cells.length; ++i) {\n", - " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", - " if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n", - " nbb_cells[i].set_text(nbb_formatted_code);\n", - " }\n", - " break;\n", - " }\n", - " }\n", - " }, 500);\n", - " " - ], - "text/plain": [ - "" - 
] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "trainer = pl.Trainer(\n", - " gpus=1,\n", - " max_epochs=40,\n", - " precision=16,\n", - " progress_bar_refresh_rate=50,\n", - " # fast_dev_run=True,\n", - " benchmark=True,\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### `lr_find`" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": { - "scrolled": false - }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\n", - " | Name | Type | Params\n", - "------------------------------------------------\n", - "0 | criterion | LogCoshLoss | 0 \n", - "1 | train_mae | MeanAbsoluteError | 0 \n", - "2 | val_mae | MeanAbsoluteError | 0 \n", - "3 | encoder | Encoder | 18 M \n", - "4 | decoder | Decoder | 18 M \n", - "5 | out | Sequential | 1 K \n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 0 | MAE: 0.05334088206291199\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "608172c29e364e248556b3e71d25f52a", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Finding best initial lr'), FloatProgress(value=0.0), HTML(value='')))" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/javascript": [ - "\n", - " setTimeout(function() {\n", - " var nbb_cell_id = 17;\n", - " var nbb_unformatted_code = \"lr_finder = trainer.tuner.lr_find(model, datamodule)\";\n", - " var nbb_formatted_code = \"lr_finder = trainer.tuner.lr_find(model, datamodule)\";\n", - " var nbb_cells = Jupyter.notebook.get_cells();\n", - " for (var i = 0; i < nbb_cells.length; ++i) {\n", - " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", - " if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n", - " nbb_cells[i].set_text(nbb_formatted_code);\n", - " }\n", - " break;\n", - " }\n", - " }\n", - " }, 500);\n", - " " - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "lr_finder = trainer.tuner.lr_find(model, datamodule)" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n" - ] - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYgAAAEKCAYAAAAIO8L1AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAApDElEQVR4nO3dd3yV9f3+8dc7mySQMBJGwjYgiMyIKIha1IILrajgoE5EtNbKty3+vvq1rW3tUEQEBzhRK6W4cE8UEBlBEdmEJWEmjIQQsj+/P3K0MRwgIblzcpLr+XicBzn3/TnnXNzGc3Fvc84hIiJSUUigA4iISN2kghAREb9UECIi4pcKQkRE/FJBiIiIXyoIERHxKyzQAWpSixYtXIcOHQIdQ0QkaCxbtizLOZfgb169KogOHTqQlpYW6BgiIkHDzLYebZ6nm5jMbKiZrTOzdDOb4Ge+mdlk3/wVZtbXN72rmS0v98gxs7u9zCoiIj/l2RqEmYUCU4HzgQxgqZnNcc6tLjdsGJDie5wOPAmc7pxbB/Qu9z7bgTe8yioiIkfycg2iP5DunNvknCsEZgLDK4wZDsxwZRYB8WbWusKYIcBG59xRV4NERKTmeVkQScC2cs8zfNOqOmYk8OrRPsTMxphZmpmlZWZmViOuiIiU52VBmJ9pFa8MeMwxZhYBXAr852gf4pyb5pxLdc6lJiT43REvIiInwMuCyADalnueDOyo4phhwNfOud2eJBQRkaPysiCWAilm1tG3JjASmFNhzBxgtO9opgFAtnNuZ7n5ozjG5qWasnBjFpuzDnn9MSIiQcWzo5icc8VmdifwIRAKPOecW2VmY33znwLeAy4E0oE84MYfXm9m0ZQdAXWbVxkBcvKLuPXFNM7umsAT1/ar1ntNnZvO/A2Z9GvflNT2zejfsRkxkfXqVBMRaUA8/fZyzr1HWQmUn/ZUuZ8dcMdRXpsHNPcyH0CTqHBuOasTj326ge8ysjk1Oe6E3mf1jhwe+WgdiY2jSNuyn6mlG+nYIoZ37xpEdIRKQkSCj67FBNxyVkfio8N5+KN1J/R65xwPzFlJfHQEH9x9Fiv+cAGPjezN5qxDTPpkQw2nFRGpHSoIoHFUOLef3Zkv1meyeNPeKr/+reU7WLplP7/7eVfioyOIjghjeO8kRvVvyzPzN7Fye7YHqUVEvKWC8Bl9RgcSG0fy8Efr8Hef7u8ysnlnxQ6WbzvA3tyCH8cczC/iL++toVdyHFeltv3JayYM7UazmEgmvL6C4pLSWvl7iIjUFG0c92kUEcqvhqRw/5sr+Xx9Jud2TfxxXl5hMdc/t5gDeUU/TosIC6Flk0jCQkLIPFjA9NGphIT89LSOuOhw/njpKdzxr6+ZOncj487tTHiot51cUFzCe9/tpLjEERsZRnhoCJuyclm9I4et+/IYeVpbrkpti5m/U1BERP5LBVHO1altmTZvIw9/uI6zUxJ+/MKfvSyDA3lFPDayNzERYWzbn8eu7Hx25+SzKyefy/sk0bttvN/3vPDUVpzfvSWPfrKe577czJBuiQzr0ZpzuiYctSyy84rYlZNPSmLsEaVzLJkHC7j95WWkbd1/xLw2cVHERoXx+9e+4/N1mTz0i1OJj46o9HuLSMOjgignIiyE35zXhXtmfcsHq3Zx4amtKSl1PDN/M33bxTO8d8WrgByfmTHlmj7MXZvJR6t28emaPbz+9XZaxEYyol8yZ3dJ4GB+EVm5hWzOyuWrTXtZtSMH56BFbATndE1k6CmtGNIt8Yh/9Tvnfpy2cns2Y2aksS+vkMdG9qZvu6bkFhSTX1RC++YxNIuJoLTUMX3+Jh7+aB3fTDrAjQM7cO7JiaQkxlJQXMqKjGxWZBzglDZxDOjUTGsZIg2c+dveHqxSU1Ndde8HUVLqGDppHqXO8eHdg/l49W5uf+VrnrquL0N7VLyOYNUVlZTyxbpMZi7dxtx1eygp/e/yjwgNoU+7eAZ0ak5SfCMWpGfx+bo95OQXc163lvz9ilNpHhtJTn4Rkz7ewMuLtuJwRIaFkl9UQkLjSKaPTqVH0rEP1f0uI5v73vyObzPKdp63bBLJ/rwiCov/u5/k1KQ4bh3ciSEnJ+pcDpF6zMyWOedS/c5TQRzpg5W7GPvyMv4xoif/Wvw9+/MK+Wz8OYRWYXNPZezJyWfNroM0i46gReMImsdEEhH2081OxSWlvLBwC//4YB1x0eH88oz2vPjVVrJyC7i8dxIt46IoKColPMy4eVBHEhtHVfrzd2Yf5vN1mXyZnkWrJlH079iMnsnxfLZ2D8/M38Qm39nlLWIj6dA8mrhG4YSFGmEhIXROjOVnJyfSMymOkBDjYH4R63fnUlRSSkLjSBIbRxIbGaa1EJE6TgVRRc45hk/9ks1ZhziYX8yDw0/h+jM6VD9gNazZmcOvZ37D+t259EqO48HLetAzOd6zzystdSxIz2Lljmy2ZuWxZe8hcguKKS5xFJaUsnXvIUpdWXlEhYeQsf/wEe+RFN+IoT1aceGprWgd14hVO3L4bns2hwqKad88mvbNY2jbtBGt4xrRKCLUs7+LiBydCuIEzFufyejnltA0OpyFE4bUiS+w/KISvvn+AKd3bFalndde2H+okC/WZ/64maxb6yZ0bdmYRhGh7DmYz67sApZu2ceCDVkUljvEN8TK9vXkF/30sN/46HC6tWrC7ed05qyUFlrzEKklKogT4JzjvjdX0iMpjlH929XIezZEOflFzF27h+zDRZzSJo5urRvTKDyUzIMFbNmbx7Z9eezKyWdn9mE+W7OHHdn5pLZvyk2DOtIqLor4RuEkNokiVvtBRDyhgpCgUFBcwqy0DKZ+ls6unPwfp0eEhXDd6e0Ze06nKu1jEZHjU0FIUMkvKmH1zhyy84o4cLiQL9P38sY32wkPNUae1o7+HZvRvXUT2jWLDvimNpFgp4KQoLc56xCPfbKed1bspNh3aHB8dDjDe7Xh6tPa0b1NkwAnFAlOKgipN/KLStiwO5dVO7L5cuNePly5i8KSUnolx3Hnz1I4z88JhSJydCoIqbf2HyrkzeXbeWHhFrbuzaN323jGX9CFQSfpSCiRylBBSL1XVFLK619nMPnTdLYfOEzP5DhuPasTw3q0IszjCySKBDMVhDQYBcUlvLZs+49ngic3bcQ/rujJmSe1CHQ0kTrpWAWhf1pJvRIZFso1p7fjk3vOZtr1/YgMC+HaZxcz6ZP1P7nulYgcnwpC6qWQEOOCU1ox585BXN47iUmfbGD0c4vZczD/+C8WEUAFIfVcTGQYj1zVi3+M6Mmyrfu58LEFfJmeFehYIkFBBSH1nplxVWpb3rpjEPHR4Vz37GImfrROm5xEjkMFIQ1G11aNmXPnQK7om8zkz9K5dUYauQXFgY4lUmepIKRBiY4I4+Ere/HgZT34Yn0mI55cyI4DR16qXERUENJAXT+gPc/dcBrb9x9m+NQvWbZ1X6AjidQ5KghpsM7uksBr486kUXgoVz29iKlz0ynVfgmRH6kgpEHr0rIx79w1iGE9WvHPD9fxy+eX6FBYER
8VhDR4TaLCeXxUHx76xaks2byPYZPm89na3YGOJRJwKggRyg6FHdW/HW//ahAJjSO56YU0HnhrJflFJYGOJhIwnhaEmQ01s3Vmlm5mE/zMNzOb7Ju/wsz6lpsXb2azzWytma0xszO8zCoCZZuc3rpzIDcP6siLX23l0ikLWLMzJ9CxRALCs4Iws1BgKjAM6A6MMrPuFYYNA1J8jzHAk+XmPQZ84Jw7GegFrPEqq0h5kWGh3H9xd2bc1J/9eUUMn/Ilzy7YrB3Y0uB4uQbRH0h3zm1yzhUCM4HhFcYMB2a4MouAeDNrbWZNgMHAswDOuULn3AEPs4ocYXCXBD749VkM7pLAg++s5vZXlnG4UJucpOHwsiCSgG3lnmf4plVmTCcgE3jezL4xs2fMLMbfh5jZGDNLM7O0zMzMmksvAjSPjWT66H7cd1E3Plq9m5HTvtJRTtJgeFkQ/m7nVXEd/WhjwoC+wJPOuT7AIeCIfRgAzrlpzrlU51xqQkJCdfKK+GVm3HJWJ6Zdn8r63blcPnUhG3YfDHQsEc95WRAZQNtyz5OBHZUckwFkOOcW+6bPpqwwRALm/O4tmXXbGRSWlHL9s0t0iQ6p97wsiKVAipl1NLMIYCQwp8KYOcBo39FMA4Bs59xO59wuYJuZdfWNGwKs9jCrSKWcmhzHjJv6c6igmBueX0J2XlGgI4l4xrOCcM4VA3cCH1J2BNIs59wqMxtrZmN9w94DNgHpwHRgXLm3+BXwipmtAHoDf/Uqq0hVdGvdhKdH92Nz1iFufSlN50pIvaV7UoucoDnf7uCuV7/hop6teXxkH0JC/O1SE6nbjnVP6rDaDiNSX1zaqw07DxzmoffX0q5ZNL8fenKgI4nUKBWESDWMGdyJrfvyePLzjbRrFs2o/u0CHUmkxqggRKrBzPjTpaewff9h7ntzJW3iG3F2Fx1uLfWDLtYnUk1hoSFMuaYPKYmx3P7yMr7ddiDQkURqhApCpAY0jgpnxk39aRYTwQ3PLyF9T26gI4lUmwpCpIYkNoni5ZtPJzTEGP3sYnZm60Q6CW4qCJEa1KFFDC/c2J+D+cXcOiONwuLSQEcSOWEqCJEa1iMpjoev6sXK7TlM/Hh9oOOInDAVhIgHfn5KK0b1b8vT8zaycGNWoOOInBAVhIhH7r+4Ox2bx3DPv7/lQF5hoOOIVJkKQsQj0RFhTBrZm6zcAu59/Tvq02VtpGFQQYh4qGdyPP/z8668v3IXLy/aGug4IlWighDx2JizOnFO1wQefGcNK7dnBzqOSKWpIEQ8FhJiPHJlL5rGhHPnv77mYL7uISHBQQUhUguax0YyeWQfvt+Xx6Sn34dx46BJEwgJKftz3DjYuDHQMUV+QgUhUktO79SciY13MP5/rqJ0+nQ4eBCcK/vzmWegZ094//1AxxT5kQpCpLZs3Mjwv9xNdHEBIcXFP51XVAR5eTBihNYkpM5QQYjUlkcewYqOs/+hqAgefbR28ogchwpCpLa8/HJZARxLURG89FLt5BE5DhWESG3JreQlwCs7TsRjKgiR2hIbW7PjRDymghCpLdddB+Hhxx4THg7XX187eUSOQwUhUlvGj69cQfzmN7WTR+Q4VBAitaVzZ5g9G6KjjyiKwpBQCiOjyuZ37hyggCI/pYIQqU3DhsGKFTBmzE/OpF4x7CrO++XjfN6pX6ATivxIBSFS2zp3hilTIDsbSkogO5seb7xE1Mkp/G72Ct07QuoMFYRIHRAVHsrEq3qz71Ah97+1KtBxRAAVhEid0SMpjrvPS+Htb3cw59sdgY4jooIQqUvGnt2ZPu3iuf/NlezJyQ90HGngPC0IMxtqZuvMLN3MJviZb2Y22Td/hZn1LTdvi5l9Z2bLzSzNy5widUVYaAgPX9mL/KIS7ntzpW5TKgHlWUGYWSgwFRgGdAdGmVn3CsOGASm+xxjgyQrzz3XO9XbOpXqVU6Su6ZwQy/gLuvDR6t28vWJnoONIA+blGkR/IN05t8k5VwjMBIZXGDMcmOHKLALizay1h5lEgsLNgzrRu208D7y1ksyDBYGOIw2UlwWRBGwr9zzDN62yYxzwkZktM7MxnqUUqYNCQ4yHr+zJocIS/u+tlYGOIw2UlwVhfqZV3KB6rDEDnXN9KdsMdYeZDfb7IWZjzCzNzNIyMzNPPK1IHXNSYmPuPi+F91fu4p0VOqpJap+XBZEBtC33PBmo+Ft+1DHOuR/+3AO8QdkmqyM456Y551Kdc6kJCQk1FF2kbhhzVid6tS07qkmbmqS2eVkQS4EUM+toZhHASGBOhTFzgNG+o5kGANnOuZ1mFmNmjQHMLAa4ANB6tjQ4YaEhPOLb1HS/jmqSWuZZQTjnioE7gQ+BNcAs59wqMxtrZmN9w94DNgHpwHRgnG96S2CBmX0LLAHedc594FVWkbrspMTGjD+/Cx+s2qWjmqRWWX36F0lqaqpLS9MpE1L/lJQ6Rjy1kM1Zh/joN4NJbBwV6EhST5jZsqOdSqAzqUWCQGiI8c8RvcjTpiapRSoIkSBxUmIs95zfhQ9X7eYdbWqSWqCCEAkitwzqSK+28TwwZxVZuTqqSbylghAJImGhITw8oie5+cU8oMuCi8dUECJBJqVlY359XgrvfreTt5ZvD3QcqcdUECJB6LbBnX68LPjO7MOBjiP1lApCJAiFhYbw6FW9KSpx/G72CkpLdVST1DwVhEiQ6tAihvsu7sb8DVm8tGhroONIPaSCEAli1/Rvx7ldE/jre2tI33Mw0HGknlFBiAQxM+PvI3oSExnGr2cup7C4NNCRpB5RQYgEucTGUfz9ip6s2pHDIx+vC3QcqUdUECL1wPndWzKqfzumzdvEwo1ZgY4j9YQKQqSeuP/ibnRsHsP4Wd9yIK8w0HGkHqhUQfjuzxDi+7mLmV1qZuHeRhORqoiOCOOxkX3Iyi3g3te/0wX9pNoquwYxD4gysyTgU+BG4AWvQonIiTk1OY7/uaAr76/cxcyl247/ApFjqGxBmHMuD/gF8Lhz7nKgu3exRORE3XpWJwad1II/vr1Kh75KtVS6IMzsDOBa4F3ftDBvIolIdYSEGBOv6kV0RBi/enU5+UUlgY4kQaqyBXE3cC/whu+2oZ2AuZ6lEpFqSWwSxT9H9GTNzhz+9v7aQMeRIFWptQDn3BfAFwC+ndVZzrm7vAwmItUzpFtLbhzYgee/3MLAk1pwfveWgY4kQaayRzH9y8yamFkMsBpYZ2a/9TaaiFTXhGEnc0qbJvx29rfsOKCrvkrVVHYTU3fnXA5wGfAe0A643qtQIlIzIsNCmXJNX4qKS7l75nKKS3QpDqm8yhZEuO+8h8uAt5xzRYAOshYJAh1bxPDny3uwZMs+Hvt0Q6DjSBCpbEE8DWwBYoB5ZtYeyPEqlIjUrMv7JHNVajJT5qYzf0NmoONIkKhUQTjnJjvnkpxzF7oyW4FzPc4mIjXoj5f2ICUxlrtnLmd3Tn6g40gQqOxO6jgzm2hmab7HI5StTYhIkGgUEcrUa/qSV1jCXa9+o/0RclyV3cT0HHAQuMr3yAGe9yqUiHgjp
WVj/nxZDxZv3sejn6wPdByp4yp7NnRn59wV5Z7/0cyWe5BHRDx2Rb9k0rbuY+rcjfRp25TzdH6EHEVl1yAOm9mgH56Y2UBAB1WLBKkHLjmFHklN+M2s5Xy/Ny/QcaSOqmxBjAWmmtkWM9sCTAFu8yyViHgqKjyUJ6/tR4gZY19epus1iV+VPYrpW+dcL6An0NM51wf4mafJRMRTbZtFM+nq3qzemcP/vrFS94+QI1TpjnLOuRzfGdUA9xxvvJkNNbN1ZpZuZhP8zDczm+ybv8LM+laYH2pm35jZO1XJKSKVc+7Jidw1JIXXvs7glcXfBzqO1DHVueWoHXOmWSgwFRhG2b0jRplZxXtIDANSfI8xwJMV5v8aWFONjCJyHHcPSeGcrgn88e1VfPP9/kDHkTqkOgVxvPXR/kC6c26Tc64QmAkMrzBmODDDd/LdIiDezFoDmFkycBHwTDUyishxhIQYk67uTau4KG5/+WuycgsCHUnqiGMWhJkdNLMcP4+DQJvjvHcSUP6ehxm+aZUdMwn4HXDMs3nMbMwPJ/BlZuoSAiInIj46gqeu68eBw4WMe+VrinQSnXCcgnDONXbONfHzaOycO945FP42QVVc6/A7xswuBvY455Yd5zNwzk1zzqU651ITEhKON1xEjuKUNnH8/YqeLNm8jz+9vTrQcaQOqM4mpuPJANqWe54M7KjkmIHApb5DamcCPzOzl72LKiIAw3sncdvgTry0aCuvLtFO64bOy4JYCqSYWUcziwBGAnMqjJkDjPYdzTQAyHbO7XTO3eucS3bOdfC97jPn3HUeZhURn98NPZnBXRL4v7dWkrZlX6DjSAB5VhDOuWLgTuBDyo5EmuW7n/VYMxvrG/YesAlIB6YD47zKIyKVExpiPD6yD0nxjRj78jIy9utM64bK6tPJMampqS4tLS3QMUTqhfQ9uVz+xJckN41m9tgziIms7KXbJJiY2TLnXKq/eV5uYhKRIHZSYiyPj+rDul05jJ/1LaWl9ecfk1I5KggROapzuiby/y7sxgerdjHxY10evKHROqOIHNPNgzqyYXcuU+am07FFDFf0Sw50JKklWoMQkWMyMx68rAdndGrOhNdXsGSzjmxqKFQQInJcEWEhPHVdP9o2jea2l9LYknUo0JGkFqggRKRS4qLDee6G03DAjS8sZf+hwkBHEo+pIESk0jq0iGH66FS2HzjMmJfSdKOhek4FISJVclqHZjxyZS+WbtnP//xHh7/WZzqKSUSq7JJebdh+4DB/e38tSfGNuPfCboGOJB5QQYjICbltcCd2HDjM0/M2kdgkipsHdQx0JKlhKggROSFmxgOXnELmwQIefGc1CY0jubTX8W4TI8FE+yBE5ISFhhiPXt2b0zs2Y/ys5SzYkBXoSFKDVBAiUi1R4aFMG51K54RYbnspjRUZBwIdSWqICkJEqi2uUTgv3tSfpjER3PD8UjZl5gY6ktQAFYSI1IiWTaJ46ebTMeD6Z5ewKzs/0JGkmlQQIlJjOraI4cWb+nMgr5Drn12ss62DnApCRGpUj6Q4pv8yla378rjh+SXkFhQHOpKcIBWEiNS4Mzu34Ilr+rJyRw63vLhUl+QIUioIEfHEed1b8siVvVi8eR/jXvmawuLSQEeSKlJBiIhnLuuTxJ8v68Fna/dw16vfUFSikggmKggR8dS1p7fn/y7uzgerdnHPrG8p0cX9goYutSEinrtpUEcKS0r52/trCQ81Hh7Ri5AQC3QsOQ4VhIjUirFnd6awuJSJH68nIjSEv15+qkqijlNBiEituWtICkUlpTz+WTphocaDw3tgppKoq1QQIlKr7jm/C4UlpTz9xSbCQkJ44JLuKok6SgUhIrXKzJgw9GRKShzPLNhMYUkpfx7eQ5ub6iAVhIjUOjPjfy/qRkRYCE98vpGColL+MaInoSqJOkUFISIBYWb89uddiQoPZeLH6ykoLuHRq3sTHqqj7+sKFYSIBIyZcdeQFCLDQnjo/bXkFZbwxLV9iQoPDXQ0weMT5cxsqJmtM7N0M5vgZ76Z2WTf/BVm1tc3PcrMlpjZt2a2ysz+6GVOEQms287uzJ8v68HcdXt0gb86xLOCMLNQYCowDOgOjDKz7hWGDQNSfI8xwJO+6QXAz5xzvYDewFAzG+BVVhEJvOsGtGfS1b1ZumU/105fpEuF1wFerkH0B9Kdc5ucc4XATGB4hTHDgRmuzCIg3sxa+57/cEuqcN9D5+eL1HPDeyfx1HX9WLPrIFc+/RU7sw8HOlKD5mVBJAHbyj3P8E2r1BgzCzWz5cAe4GPn3GLvoopIXXF+95bMuKk/u7LzGfHkV7p9aQB5WRD+jleruBZw1DHOuRLnXG8gGehvZj38fojZGDNLM7O0zMzM6uQVkTpiQKfmzBwzgPyiEkY89RXffL8/0JEaJC8LIgNoW+55MrCjqmOccweAz4Gh/j7EOTfNOZfqnEtNSEioZmQRqSt6JMXxn7FnEBsZxqjpi/h49e5AR2pwvCyIpUCKmXU0swhgJDCnwpg5wGjf0UwDgGzn3E4zSzCzeAAzawScB6z1MKuI1EGdEmJ5fdyZdG3ZmNteSuOlRVsDHalB8awgnHPFwJ3Ah8AaYJZzbpWZjTWzsb5h7wGbgHRgOjDON701MNfMVlBWNB87597xKquI1F0tYiN5dcwAzu2ayP1vruSh99ZQqntK1Apzrv4s6NTUVJeWlhboGCLigeKSUv749mpeWrSVC09txcSreuuEuhpgZsucc6n+5umcdhEJCmGhIfxp+Cncd1E33l+5i1HTF5GVWxDoWPWaCkJEgoaZcctZnXjimr6s2ZnDZVO/ZP3ug4GOVW+pIEQk6Aw7tTX/HnMGBcWlXPHEQuat1yHuXlBBiEhQ6tU2njfvGEhS00bc+MJSnluwmfq0T7UuUEGISNBKim/E7NvPZMjJifzpndX8dvYK8otKAh2r3lBBiEhQi40M46nr+vHrISnMXpbByGmL2JWdH+hY9YIKQkSCXkiI8Zvzu/DUdX1Zv/sgl0xZQNqWfYGOFfRUECJSbwzt0Zo37xhIbGQYI6ct4qVFW7VfohpUECJSr3Rp2Zg37xjI4C4J3P/mSu2XqAYVhIjUO3GNwnlmdOqP+yVGPLWQbfvyAh0r6KggRKRe+mG/xLO/TGXr3jwumbKAuev2BDpWUFFBiEi9NqRbS96+cxCtmkRx4/NL+eeHaykuKQ10rKCgghCReq9DixjevGMgI09ry9S5G7n2mcU6FLYSVBAi0iBEhYfytyt68siVvViRkc2Fk+drk9NxqCBEpEG5ol8yb/9qEImNI7nx+aX85d3VFBZrk5M/KggRaXBOSozlzTsGcv2A9kyfv5krnlzIpszcQMeqc1QQItIgRYWH8uBlPXj6+n5s25/HRZMX8O+l3+vEunJUECLSoP38lFZ88OvB9GkXz+9f+46xLy9jr25EBKggRERoFRfFyzefzv9e2I25azP5+aT5zF2rHdgqCBERyk6su3VwJ966cyAtYiO48YWlTHhtBQfziwIdLWBUECIi5XRr3YQ37xjI
bWd3YlbaNn7+6Dzmb2iYd6xTQYiIVBAVHsq9w7ox+/YziYoI5fpnlzDhtRXkNLC1CRWEiMhR9G3XlPfuOovbBpetTVwwcR6frtkd6Fi1RgUhInIMUeGh3HthN94YN5C4RuHc/GIad/zra/bk1P9LdaggREQqoVfbeN7+1SDuOb8LH6/ezZBHvuDFhVsoKa2/502oIEREKikiLIS7hqTw0d2D6d0ungfmrOLSenx7UxWEiEgVdWgRw4yb+vP4qD7sO1TIiKe+4u6Z37C7nm12UkGIiJwAM+OSXm34dPzZ/OpnJ/Heyl387OHPeWb+Jorqyf0mVBAiItUQHRHG+Au68vFvBtO/YzP+/O4aLpo8n6827g10tGpTQYiI1ID2zWN47obTmD46lbzCEkZNX8S4V5aRsT9474XtaUGY2VAzW2dm6WY2wc98M7PJvvkrzKyvb3pbM5trZmvMbJWZ/drLnCIiNcHMOL97Sz6552zuOb8Ln63dw5BHvmDiR+s4VFAc6HhV5llBmFkoMBUYBnQHRplZ9wrDhgEpvscY4Enf9GJgvHOuGzAAuMPPa0VE6qSo8FDuGpLCZ+PP4YJTWjH5s3TOffhz/pO2jdIgOizWyzWI/kC6c26Tc64QmAkMrzBmODDDlVkExJtZa+fcTufc1wDOuYPAGiDJw6wiIjWuTXwjHh/Vh9duP5M28Y347ewVXDJlAQs3ZgU6WqV4WRBJwLZyzzM48kv+uGPMrAPQB1js70PMbIyZpZlZWmZmw7yglojUbf3aN+X128/ksZG9OZBXxDXTF3PLi0vZsPtgoKMdk5cFYX6mVVy3OuYYM4sFXgPuds7l+PsQ59w051yqcy41ISHhhMOKiHgpJMQY3juJT8efze+HnsziTfu4YNI87p75DZuzDgU6nl9hHr53BtC23PNkYEdlx5hZOGXl8Ipz7nUPc4qI1Jqo8FBuP6czV5/WlqfnbWTGwq28vWInl/Zqw82DOtIjKS7QEX/k5RrEUiDFzDqaWQQwEphTYcwcYLTvaKYBQLZzbqeZGfAssMY5N9HDjCIiAdEsJoJ7h3Vj3u/O5cYzO/DRql1c/PgCRk77irnr9tSJe2OblyHM7EJgEhAKPOec+4uZjQVwzj3lK4IpwFAgD7jROZdmZoOA+cB3wA+nJP4/59x7x/q81NRUl5aW5s1fRkTEQzn5Rfx7yTae/3IzO7Lz6dMunvHnd2XgSc0p+6r0hpktc86l+p1XF1qqpqggRCTYFRaXMntZBo9/toGd2fn079CMu89L4YzO3hSFCkJEJMgUFJcwc8k2nvg8nd05BZzWoSl3DUlh0EktarQoVBAiIkEqv6iEWWnbePLzjezMzqdXchx3nHsS53VrSUhI9YtCBSEiEuQKikt4/evtPPn5Rr7fl0dKYiy/PLMDl/dJIibyxA9IVUGIiNQTxSWlvL1iB88u2MzK7Tk0jgrjyn5t+f2wrkSGhVb5/Y5VEF6eByEiIjUsLDSEy/skc1nvJL7+fj8vLNzKsq37iAit+bMWVBAiIkHIzOjXvhn92jejuKTUkyOcdD8IEZEgF+bB2gOoIERE5ChUECIi4pcKQkRE/FJBiIiIXyoIERHxSwUhIiJ+qSBERMSvenWpDTPLBLYGOkcNawEExx3O6wYtr6rR8qqa+ri82jvn/N6vuV4VRH1kZmlHu06KHEnLq2q0vKqmoS0vbWISERG/VBAiIuKXCqLumxboAEFGy6tqtLyqpkEtL+2DEBERv7QGISIifqkgRETELxWEiIj4pYIIYmZ2lpk9ZWbPmNnCQOep68zsHDOb71tm5wQ6T11nZt18y2q2md0e6Dx1nZl1MrNnzWx2oLPUFBVEgJjZc2a2x8xWVpg+1MzWmVm6mU041ns45+Y758YC7wAvepk30GpieQEOyAWigAyvstYFNfT7tcb3+3UVUK9PDquh5bXJOXezt0lrl45iChAzG0zZl9UM51wP37RQYD1wPmVfYEuBUUAo8FCFt7jJObfH97pZwC3OuZxail/ramJ5AVnOuVIzawlMdM5dW1v5a1tN/X6Z2aXABGCKc+5ftZW/ttXw/4+znXMjaiu7l8ICHaChcs7NM7MOFSb3B9Kdc5sAzGwmMNw59xBwsb/3MbN2QHZ9LgeoueXlsx+I9CRoHVFTy8s5NweYY2bvAvW2IGr496ve0CamuiUJ2FbueYZv2rHcDDzvWaK6rUrLy8x+YWZPAy8BUzzOVhdVdXmdY2aTfcvsPa/D1UFVXV7NzewpoI+Z3et1uNqgNYi6xfxMO+Y2QOfcAx5lCQZVWl7OudeB172LU+dVdXl9DnzuVZggUNXltRcY612c2qc1iLolA2hb7nkysCNAWYKBllfVaHlVTYNfXiqIumUpkGJmHc0sAhgJzAlwprpMy6tqtLyqpsEvLxVEgJjZq8BXQFczyzCzm51zxcCdwIfAGmCWc25VIHPWFVpeVaPlVTVaXv7pMFcREfFLaxAiIuKXCkJERPxSQYiIiF8qCBER8UsFISIifqkgRETELxWE1HtmllvLn1er9+Yws3gzG1ebnykNgwpCpIrM7JjXMHPOnVnLnxkPqCCkxulifdIgmVlnYCqQAOQBtzrn1prZJcB9QASwF7jWObfbzP4AtAE6AFlmth5oB3Ty/TnJOTfZ9965zrlY313r/gBkAT2AZcB1zjlnZhcCE33zvgY6Oed+cglpM7sBuIiyGxzF+O7N8BbQFAgH7nPOvQX8DehsZsuBj51zvzWz31J2o59I4I0GflFHOVHOOT30qNcPINfPtE+BFN/PpwOf+X5uyn+vMHAL8Ijv5z9Q9gXfqNzzhZR9AbegrEzCy38ecA6QTdlF3kIou5TDIMq+8LcBHX3jXgXe8ZPxBsouGNfM9zwMaOL7uQWQTtkVRzsAK8u97gJgmm9eCGV3HBwc6P8OegTfQ2sQ0uCYWSxwJvAfsx+v6PzDDYSSgX+bWWvK1iI2l3vpHOfc4XLP33XOFQAFZrYHaMmRtzJd4pzL8H3ucsq+zHOBTc65H977VWDMUeJ+7Jzb90N04K++u5+VUnZvgpZ+XnOB7/GN73kskALMO8pniPilgpCGKAQ44Jzr7Wfe45TdjnROuU1EPzhUYWxBuZ9L8P//k78x/u4zcDTlP/NayjaJ9XPOFZnZFsrWRioy4CHn3NNV+ByRI2gntTQ4ruz2rJvN7EoAK9PLNzsO2O77+ZceRVgLdCp3i8urK/m6OGCPrxzOBdr7ph8EGpcb9yFwk29NCTNLMrPE6seWhkZrENIQRJtZ+U0/Eyn71/iTZnYfZTt8ZwLfUrbG8B8z2w4sAjrWdBjn3GHfYakfmFkWsKSSL30FeNvM0oDllBUNzrm9Zvalma0E3ndlO6m7AV/5NqHlAtcBe2r4ryL1nC73LRIAZhbrnMu1sm/wqcAG59yjgc4lUp42MYkExq2+ndarKNt0pP0FUudoDUJERPzSGoSIiPilghAREb9UECIi4pcKQkRE/FJBiIiIXyoIERHx6/8D7d15Zo3uCIEAAAAASUVORK5CYII=\n", - 
"text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - }, - { - "data": { - "application/javascript": [ - "\n", - " setTimeout(function() {\n", - " var nbb_cell_id = 18;\n", - " var nbb_unformatted_code = \"fig = lr_finder.plot(suggest=True)\";\n", - " var nbb_formatted_code = \"fig = lr_finder.plot(suggest=True)\";\n", - " var nbb_cells = Jupyter.notebook.get_cells();\n", - " for (var i = 0; i < nbb_cells.length; ++i) {\n", - " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", - " if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n", - " nbb_cells[i].set_text(nbb_formatted_code);\n", - " }\n", - " break;\n", - " }\n", - " }\n", - " }, 500);\n", - " " - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "fig = lr_finder.plot(suggest=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "0.0001445439770745928" - ] - }, - "execution_count": 20, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "application/javascript": [ - "\n", - " setTimeout(function() {\n", - " var nbb_cell_id = 20;\n", - " var nbb_unformatted_code = \"model.hparams.lr = lr_finder.suggestion()\\nmodel.hparams.lr\";\n", - " var nbb_formatted_code = \"model.hparams.lr = lr_finder.suggestion()\\nmodel.hparams.lr\";\n", - " var nbb_cells = Jupyter.notebook.get_cells();\n", - " for (var i = 0; i < nbb_cells.length; ++i) {\n", - " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", - " if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n", - " nbb_cells[i].set_text(nbb_formatted_code);\n", - " }\n", - " break;\n", - " }\n", - " }\n", - " }, 500);\n", - " " - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "model.hparams.lr = lr_finder.suggestion()\n", - "model.hparams.lr" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## `fit`" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [ - { - "data": { - "application/javascript": [ - "\n", - " setTimeout(function() {\n", - " var nbb_cell_id = 21;\n", - " var nbb_unformatted_code = \"# model.hparams.lr = 2e-4\";\n", - " var nbb_formatted_code = \"# model.hparams.lr = 2e-4\";\n", - " var nbb_cells = Jupyter.notebook.get_cells();\n", - " for (var i = 0; i < nbb_cells.length; ++i) {\n", - " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", - " if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n", - " nbb_cells[i].set_text(nbb_formatted_code);\n", - " }\n", - " break;\n", - " }\n", - " }\n", - " }, 500);\n", - " " - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "# model.hparams.lr = 2e-4" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\n", - " | Name | Type | Params\n", - "------------------------------------------------\n", - "0 | criterion | LogCoshLoss | 0 \n", - "1 | train_mae | MeanAbsoluteError | 0 \n", - "2 | val_mae | MeanAbsoluteError | 0 \n", - "3 | encoder | Encoder | 18 M \n", - "4 | decoder | Decoder | 18 M \n", - "5 | out | Sequential | 1 K \n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "22519c6f56f54ead9c89b6152eed40d2", - "version_major": 2, - "version_minor": 0 - 
}, - "text/plain": [ - "HBox(children=(HTML(value='Validation sanity check'), FloatProgress(value=1.0, bar_style='info', layout=Layout…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 1 | MAE: 0.05334088206291199\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "39b074271472418db0ce4c3b47c25efd", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Training'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), max…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "b3ac18fd4aca4ce29405da3a9dda7fba", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 1 | MAE: 0.014351904392242432\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "d96949d92f1c4669a79c8241bc946d3e", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 2 | MAE: 0.013196761719882488\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "8525e3d356844fb985e71af59bd70e77", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 3 | MAE: 0.013001780956983566\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "53028f573c9f45d5822f384652548edf", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 4 | MAE: 0.012537709437310696\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "b35660f83a6649d0a12f52b85be6de49", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 5 | MAE: 0.012411631643772125\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "2390ef2fb3724451b131fa23f9562632", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 6 | MAE: 0.012339100241661072\n" - ] - }, - { - "data": { - 
"application/vnd.jupyter.widget-view+json": { - "model_id": "f5fbe665429c4f119ed8e3cd747e2ff1", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 7 | MAE: 0.01189120952039957\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "fb73ebe7f6214e73997de44abdb2ea9d", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 8 | MAE: 0.012403431348502636\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "90ae35a39d3e49c6a5b2c7294de3db97", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 9 | MAE: 0.012217310257256031\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "3146f10e97be48e9a5bd4b7529724272", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 10 | MAE: 0.012288837693631649\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "0d3ad3cb154b46949efd37e8cb95d6b3", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 11 | MAE: 0.011968758888542652\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "32300e85e8d24a128e1a9c8f53f7e2da", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 12 | MAE: 0.012066589668393135\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "8bb8f57cd7b84401bf548ea046affac2", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 13 | MAE: 0.011628727428615093\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "61d5cda8e5144d0cab9149af06662af1", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, 
bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 14 | MAE: 0.011522951535880566\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "8527007f67c84a5b942fc23c711deb01", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 15 | MAE: 0.012007856741547585\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "001bd9af14fc496c90a314ba7e4a3f79", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 16 | MAE: 0.011744077317416668\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "13d7f90b77594073b5e6dca629047a19", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 17 | MAE: 0.011944593861699104\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "0acc8e4341034ef0ba404f76f74956a2", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 18 | MAE: 0.012012067250907421\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "6bad82f7d90847d288016af5a11732dd", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 19 | MAE: 0.011811992153525352\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "61403c10b72d4304bbde434605a27c38", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 20 | MAE: 0.011662784032523632\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "c8e678afa3be4c08ad19a8756e2f7c11", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 21 | MAE: 0.011700300499796867\n" - ] - }, - { 
- "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "b2508d199c9c43b9a61dfc520f466fbf", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 22 | MAE: 0.011596720665693283\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "a54a3af420414b2989652588596ca967", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 23 | MAE: 0.01195605006068945\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "908dfb4b72b341bd933de152a3a619c9", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 24 | MAE: 0.011828754097223282\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "0c2a0393fc11499eabd162384fcfabd0", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 25 | MAE: 0.011397678405046463\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "fa0a19fcddca4ceda3fae6ceda39e686", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 26 | MAE: 0.011438102461397648\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "f1724fb665c143d1811cdeb40e449bf0", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 27 | MAE: 0.011640344746410847\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "e77fd50bb32041e3ae8bbe23e8d3cdc9", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 28 | MAE: 0.011868203990161419\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "336593e3d1234cc9866ac4bcbaff6606", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), 
FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 29 | MAE: 0.011692555621266365\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "aa97239a8781492bb23d959ea3853e69", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 30 | MAE: 0.0124813262373209\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "2e8317d0eec04df48087e6599d23178e", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 31 | MAE: 0.011777856387197971\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "ef591e58af5c4cafa6fddbf5fc911d74", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 32 | MAE: 0.011196047067642212\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "5ab934708b7c482285e867c1168a102c", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 33 | MAE: 0.011469211429357529\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "1d74d212f0614a649edb2f939647fa40", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 34 | MAE: 0.011484542861580849\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "30eeea42895c40358413cdc2672337ee", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 35 | MAE: 0.011434459127485752\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "a3fe597c790d4ffaabdb42ccb60f7af9", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 36 | MAE: 
0.01203860528767109\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "6326419000324895988c34e9ed54f7d7", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 37 | MAE: 0.011480816639959812\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "0ed1a13c830f431bbb22d69f7af4d64a", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, + " loss, y, y_hat = self.shared_step(batch, batch_idx)\n", + " return {\"loss\": loss, \"y\": y.detach(), \"y_hat\": y_hat.detach()}\n", + "\n", + " def validation_epoch_end(self, outputs):\n", + " avg_loss = torch.stack([x[\"loss\"] for x in outputs]).mean()\n", + " self.log(\"val_loss\", avg_loss)\n", + "\n", + " tfms = nn.Sequential(\n", + " T.CenterCrop(120),\n", + " )\n", + "\n", + " y = torch.cat([x[\"y\"] for x in outputs])\n", + " y = tfms(y)\n", + " y = y.detach().cpu().numpy()\n", + " y = y.reshape(-1, 120 * 120)\n", + "\n", + " y_hat = torch.cat([x[\"y_hat\"] for x in outputs])\n", + " y_hat = tfms(y_hat)\n", + " y_hat = y_hat.detach().cpu().numpy()\n", + " y_hat = y_hat.reshape(-1, 120 * 120)\n", + "\n", + " y = 255.0 * y[:, args[\"dams\"]]\n", + " y = np.round(y).clip(0, 255)\n", + " y_hat = 255.0 * y_hat[:, args[\"dams\"]]\n", + " y_hat = np.round(y_hat).clip(0, 255)\n", + " # mae = metrics.mean_absolute_error(y, y_hat)\n", + "\n", + " y_true = radar2precipitation(y)\n", + " y_true = np.where(y_true >= 0.1, 1, 0)\n", + " y_pred = radar2precipitation(y_hat)\n", + " y_pred = np.where(y_pred >= 0.1, 1, 0)\n", + "\n", + " y = y * y_true\n", + " y_hat = y_hat * y_true\n", + " # mae = np.abs(y - y_hat).sum() / y_true.sum()\n", + " mae = np.abs(y - y_hat).mean()\n", + "\n", + " tn, fp, fn, tp = metrics.confusion_matrix(\n", + " y_true.reshape(-1), y_pred.reshape(-1)\n", + " ).ravel()\n", + " csi = tp / (tp + fn + fp)\n", + "\n", + " comp_metric = mae / (csi + 1e-12)\n", + "\n", + " print(\n", + " f\"Epoch {self.current_epoch} | MAE/CSI: {comp_metric} | MAE: {mae} | CSI: {csi} | Loss: {avg_loss}\"\n", + " )\n", + "\n", + " def configure_optimizers(self):\n", + " optimizer = torch.optim.Adam(self.parameters(), lr=self.lr)\n", + " scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(\n", + " optimizer, T_max=self.num_train_steps\n", + " )\n", + " return [optimizer], [{\"scheduler\": scheduler, \"interval\": \"step\"}]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Train" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "scrolled": false + }, + "outputs": [ { - "name": "stdout", + "name": "stderr", "output_type": "stream", "text": [ - "Epoch 38 | MAE: 0.011445727199316025\n" + "GPU available: True, used: True\n", + "TPU available: False, using: 0 TPU cores\n", + "LOCAL_RANK: 0 - CUDA_VISIBLE_DEVICES: [0]\n", + "Using native 16bit precision.\n", + "\n", + " | Name | Type | Params\n", + "-----------------------------------------\n", + "0 | criterion | L1Loss | 0 \n", + "1 | encoder | Encoder | 18 M \n", + "2 | decoder | Decoder | 15 M \n", + "3 | out | Sequential | 1 K \n" ] 
}, { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "b8ae8ba9e62f4f1f88f99ba5354cf362", + "model_id": "101c9fa38ccc4e998ca991d319623ae6", "version_major": 2, "version_minor": 0 }, "text/plain": [ - "HBox(children=(HTML(value='Validating'), FloatProgress(value=1.0, bar_style='info', layout=Layout(flex='2'), m…" + "HBox(children=(HTML(value='Validation sanity check'), FloatProgress(value=1.0, bar_style='info', layout=Layout…" ] }, "metadata": {}, "output_type": "display_data" }, { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 39 | MAE: 0.011705679818987846\n" + "ename": "RuntimeError", + "evalue": "Given groups=1, weight of size [512, 1536, 3, 3], expected input[128, 1024, 16, 16] to have 1536 channels, but got 1024 channels instead", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 32\u001b[0m )\n\u001b[1;32m 33\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 34\u001b[0;31m \u001b[0mtrainer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdatamodule\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 35\u001b[0m \u001b[0mtrainer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msave_checkpoint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf\"rainnet_fold{fold}_bs64_epoch50.ckpt\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 36\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.pyenv/versions/miniconda3-latest/envs/torch/lib/python3.7/site-packages/pytorch_lightning/trainer/trainer.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, model, train_dataloader, val_dataloaders, datamodule)\u001b[0m\n\u001b[1;32m 438\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcall_hook\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'on_fit_start'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 439\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 440\u001b[0;31m \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0maccelerator_backend\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 441\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0maccelerator_backend\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mteardown\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 442\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.pyenv/versions/miniconda3-latest/envs/torch/lib/python3.7/site-packages/pytorch_lightning/accelerators/gpu_accelerator.py\u001b[0m in \u001b[0;36mtrain\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 52\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 53\u001b[0m \u001b[0;31m# train or test\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 54\u001b[0;31m \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain_or_test\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 55\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mresults\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 56\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.pyenv/versions/miniconda3-latest/envs/torch/lib/python3.7/site-packages/pytorch_lightning/accelerators/accelerator.py\u001b[0m in \u001b[0;36mtrain_or_test\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 66\u001b[0m \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrainer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun_test\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 67\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 68\u001b[0;31m \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrainer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 69\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mresults\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 70\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.pyenv/versions/miniconda3-latest/envs/torch/lib/python3.7/site-packages/pytorch_lightning/trainer/trainer.py\u001b[0m in \u001b[0;36mtrain\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 460\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 461\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mtrain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 462\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun_sanity_check\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget_model\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 463\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 464\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcheckpoint_connector\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mhas_trained\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mFalse\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.pyenv/versions/miniconda3-latest/envs/torch/lib/python3.7/site-packages/pytorch_lightning/trainer/trainer.py\u001b[0m in \u001b[0;36mrun_sanity_check\u001b[0;34m(self, ref_model)\u001b[0m\n\u001b[1;32m 648\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 649\u001b[0m \u001b[0;31m# run eval step\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 650\u001b[0;31m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0meval_results\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun_evaluation\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtest_mode\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmax_batches\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_sanity_val_batches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 
651\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 652\u001b[0m \u001b[0;31m# allow no returns from eval\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.pyenv/versions/miniconda3-latest/envs/torch/lib/python3.7/site-packages/pytorch_lightning/trainer/trainer.py\u001b[0m in \u001b[0;36mrun_evaluation\u001b[0;34m(self, test_mode, max_batches)\u001b[0m\n\u001b[1;32m 568\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 569\u001b[0m \u001b[0;31m# lightning module methods\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 570\u001b[0;31m \u001b[0moutput\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mevaluation_loop\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mevaluation_step\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtest_mode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch_idx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdataloader_idx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 571\u001b[0m \u001b[0moutput\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mevaluation_loop\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mevaluation_step_end\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0moutput\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 572\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.pyenv/versions/miniconda3-latest/envs/torch/lib/python3.7/site-packages/pytorch_lightning/trainer/evaluation_loop.py\u001b[0m in \u001b[0;36mevaluation_step\u001b[0;34m(self, test_mode, batch, batch_idx, dataloader_idx)\u001b[0m\n\u001b[1;32m 169\u001b[0m \u001b[0moutput\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrainer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0maccelerator_backend\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtest_step\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 170\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 171\u001b[0;31m \u001b[0moutput\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrainer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0maccelerator_backend\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalidation_step\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 172\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 173\u001b[0m \u001b[0;31m# track batch size for weighted average\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.pyenv/versions/miniconda3-latest/envs/torch/lib/python3.7/site-packages/pytorch_lightning/accelerators/gpu_accelerator.py\u001b[0m in \u001b[0;36mvalidation_step\u001b[0;34m(self, args)\u001b[0m\n\u001b[1;32m 74\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrainer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mamp_backend\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0mAMPType\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mNATIVE\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 75\u001b[0m \u001b[0;32mwith\u001b[0m 
\u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcuda\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mamp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mautocast\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 76\u001b[0;31m \u001b[0moutput\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__validation_step\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 77\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 78\u001b[0m \u001b[0moutput\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__validation_step\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.pyenv/versions/miniconda3-latest/envs/torch/lib/python3.7/site-packages/pytorch_lightning/accelerators/gpu_accelerator.py\u001b[0m in \u001b[0;36m__validation_step\u001b[0;34m(self, args)\u001b[0m\n\u001b[1;32m 84\u001b[0m \u001b[0mbatch\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto_device\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 85\u001b[0m \u001b[0margs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 86\u001b[0;31m \u001b[0moutput\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrainer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalidation_step\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 87\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0moutput\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 88\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36mvalidation_step\u001b[0;34m(self, batch, batch_idx)\u001b[0m\n\u001b[1;32m 46\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 47\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mvalidation_step\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch_idx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 48\u001b[0;31m \u001b[0mloss\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my_hat\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshared_step\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch_idx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 49\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0;34m\"loss\"\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mloss\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"y\"\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0my\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdetach\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"y_hat\"\u001b[0m\u001b[0;34m:\u001b[0m 
\u001b[0my_hat\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdetach\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 50\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36mshared_step\u001b[0;34m(self, batch, batch_idx)\u001b[0m\n\u001b[1;32m 36\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mshared_step\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch_idx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 37\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 38\u001b[0;31m \u001b[0my_hat\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 39\u001b[0m \u001b[0mloss\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcriterion\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0my_hat\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 40\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mloss\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my_hat\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.pyenv/versions/miniconda3-latest/envs/torch/lib/python3.7/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_call_impl\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 725\u001b[0m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_slow_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 726\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 727\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 728\u001b[0m for hook in itertools.chain(\n\u001b[1;32m 729\u001b[0m \u001b[0m_global_forward_hooks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m 30\u001b[0m \u001b[0mftrs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mencoder\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 31\u001b[0m \u001b[0mftrs\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mftrs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 32\u001b[0;31m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdecoder\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mftrs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mftrs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 33\u001b[0m \u001b[0mout\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mout\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 34\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mout\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.pyenv/versions/miniconda3-latest/envs/torch/lib/python3.7/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_call_impl\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 725\u001b[0m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_slow_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 726\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 727\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 728\u001b[0m for hook in itertools.chain(\n\u001b[1;32m 729\u001b[0m \u001b[0m_global_forward_hooks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, x, ftrs)\u001b[0m\n\u001b[1;32m 54\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mups\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 55\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mftrs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdim\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 56\u001b[0;31m \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconvs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 57\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.pyenv/versions/miniconda3-latest/envs/torch/lib/python3.7/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_call_impl\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 725\u001b[0m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_slow_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 726\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 727\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 728\u001b[0m for hook in itertools.chain(\n\u001b[1;32m 729\u001b[0m \u001b[0m_global_forward_hooks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m 12\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 13\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 14\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnet\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 15\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 16\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.pyenv/versions/miniconda3-latest/envs/torch/lib/python3.7/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_call_impl\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 725\u001b[0m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_slow_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 726\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 727\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 728\u001b[0m for hook in itertools.chain(\n\u001b[1;32m 729\u001b[0m \u001b[0m_global_forward_hooks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.pyenv/versions/miniconda3-latest/envs/torch/lib/python3.7/site-packages/torch/nn/modules/container.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 115\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 116\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mmodule\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 117\u001b[0;31m \u001b[0minput\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmodule\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 118\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 119\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.pyenv/versions/miniconda3-latest/envs/torch/lib/python3.7/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_call_impl\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 725\u001b[0m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_slow_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 726\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 727\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 728\u001b[0m for hook in itertools.chain(\n\u001b[1;32m 729\u001b[0m \u001b[0m_global_forward_hooks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.pyenv/versions/miniconda3-latest/envs/torch/lib/python3.7/site-packages/torch/nn/modules/conv.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 421\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 422\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mTensor\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m->\u001b[0m \u001b[0mTensor\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 423\u001b[0;31m \u001b[0;32mreturn\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_conv_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mweight\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 424\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 425\u001b[0m \u001b[0;32mclass\u001b[0m \u001b[0mConv3d\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0m_ConvNd\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.pyenv/versions/miniconda3-latest/envs/torch/lib/python3.7/site-packages/torch/nn/modules/conv.py\u001b[0m in \u001b[0;36m_conv_forward\u001b[0;34m(self, input, weight)\u001b[0m\n\u001b[1;32m 418\u001b[0m _pair(0), self.dilation, self.groups)\n\u001b[1;32m 419\u001b[0m return F.conv2d(input, weight, self.bias, self.stride,\n\u001b[0;32m--> 420\u001b[0;31m self.padding, self.dilation, self.groups)\n\u001b[0m\u001b[1;32m 421\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 422\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mTensor\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m->\u001b[0m \u001b[0mTensor\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mRuntimeError\u001b[0m: Given groups=1, weight of size [512, 1536, 3, 3], expected input[128, 1024, 16, 16] to have 1536 channels, but got 1024 channels instead" ] }, - { - "data": { - "text/plain": [ - "1" - ] - }, - "execution_count": 22, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "application/javascript": [ - "\n", - " setTimeout(function() {\n", - " var nbb_cell_id = 22;\n", - " var nbb_unformatted_code = \"trainer.fit(model, datamodule)\";\n", - " var nbb_formatted_code = \"trainer.fit(model, datamodule)\";\n", - " var nbb_cells = Jupyter.notebook.get_cells();\n", - " for (var i = 0; i < nbb_cells.length; ++i) {\n", - " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", - " if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n", - " nbb_cells[i].set_text(nbb_formatted_code);\n", - " }\n", - " break;\n", - " }\n", - " }\n", - " }, 500);\n", - " " - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "trainer.fit(model, datamodule)" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "metadata": {}, - "outputs": [ { "data": { "application/javascript": [ "\n", " setTimeout(function() {\n", - " var nbb_cell_id = 24;\n", - " var nbb_unformatted_code = \"trainer.save_checkpoint(\\\"rainnet_fold0_bs64_epoch40.ckpt\\\")\";\n", - " var nbb_formatted_code = \"trainer.save_checkpoint(\\\"rainnet_fold0_bs64_epoch40.ckpt\\\")\";\n", + " var nbb_cell_id = 12;\n", + " var nbb_unformatted_code = \"df = pd.read_csv(args[\\\"train_folds_csv\\\"])\\n\\nfor fold in range(5):\\n train_df = df[df.fold != fold]\\n val_df = df[df.fold == fold]\\n \\n datamodule = NowcastingDataModule(\\n train_df, val_df, batch_size=args[\\\"batch_size\\\"], num_workers=args[\\\"num_workers\\\"]\\n )\\n datamodule.setup()\\n\\n num_train_steps = (\\n int(\\n np.ceil(\\n len(train_df)\\n // args[\\\"batch_size\\\"]\\n / args[\\\"gradient_accumulation_steps\\\"]\\n )\\n )\\n * args[\\\"max_epochs\\\"]\\n )\\n \\n model = RainNet(num_train_steps=num_train_steps)\\n \\n trainer = pl.Trainer(\\n 
gpus=args[\\\"gpus\\\"],\\n max_epochs=args[\\\"max_epochs\\\"],\\n precision=args[\\\"precision\\\"],\\n progress_bar_refresh_rate=50,\\n benchmark=True,\\n auto_lr_find=True,\\n )\\n\\n trainer.fit(model, datamodule)\\n trainer.save_checkpoint(f\\\"rainnet_fold{fold}_bs64_epoch50.ckpt\\\")\\n\\n del datamodule, model, trainer\\n gc.collect()\\n torch.cuda.empty_cache()\\n break\";\n", + " var nbb_formatted_code = \"df = pd.read_csv(args[\\\"train_folds_csv\\\"])\\n\\nfor fold in range(5):\\n train_df = df[df.fold != fold]\\n val_df = df[df.fold == fold]\\n\\n datamodule = NowcastingDataModule(\\n train_df, val_df, batch_size=args[\\\"batch_size\\\"], num_workers=args[\\\"num_workers\\\"]\\n )\\n datamodule.setup()\\n\\n num_train_steps = (\\n int(\\n np.ceil(\\n len(train_df)\\n // args[\\\"batch_size\\\"]\\n / args[\\\"gradient_accumulation_steps\\\"]\\n )\\n )\\n * args[\\\"max_epochs\\\"]\\n )\\n\\n model = RainNet(num_train_steps=num_train_steps)\\n\\n trainer = pl.Trainer(\\n gpus=args[\\\"gpus\\\"],\\n max_epochs=args[\\\"max_epochs\\\"],\\n precision=args[\\\"precision\\\"],\\n progress_bar_refresh_rate=50,\\n benchmark=True,\\n auto_lr_find=True,\\n )\\n\\n trainer.fit(model, datamodule)\\n trainer.save_checkpoint(f\\\"rainnet_fold{fold}_bs64_epoch50.ckpt\\\")\\n\\n del datamodule, model, trainer\\n gc.collect()\\n torch.cuda.empty_cache()\\n break\";\n", " var nbb_cells = Jupyter.notebook.get_cells();\n", " for (var i = 0; i < nbb_cells.length; ++i) {\n", " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", @@ -2264,7 +884,46 @@ } ], "source": [ - "trainer.save_checkpoint(\"rainnet_fold0_bs64_epoch40.ckpt\")" + "df = pd.read_csv(args[\"train_folds_csv\"])\n", + "\n", + "for fold in range(5):\n", + " train_df = df[df.fold != fold]\n", + " val_df = df[df.fold == fold]\n", + "\n", + " datamodule = NowcastingDataModule(\n", + " train_df, val_df, batch_size=args[\"batch_size\"], num_workers=args[\"num_workers\"]\n", + " )\n", + " datamodule.setup()\n", + "\n", + " num_train_steps = (\n", + " int(\n", + " np.ceil(\n", + " len(train_df)\n", + " // args[\"batch_size\"]\n", + " / args[\"gradient_accumulation_steps\"]\n", + " )\n", + " )\n", + " * args[\"max_epochs\"]\n", + " )\n", + "\n", + " model = RainNet(num_train_steps=num_train_steps)\n", + "\n", + " trainer = pl.Trainer(\n", + " gpus=args[\"gpus\"],\n", + " max_epochs=args[\"max_epochs\"],\n", + " precision=args[\"precision\"],\n", + " progress_bar_refresh_rate=50,\n", + " benchmark=True,\n", + " auto_lr_find=True,\n", + " )\n", + "\n", + " trainer.fit(model, datamodule)\n", + " trainer.save_checkpoint(f\"rainnet_fold{fold}_bs64_epoch50.ckpt\")\n", + "\n", + " del datamodule, model, trainer\n", + " gc.collect()\n", + " torch.cuda.empty_cache()\n", + " break" ] }, { @@ -2276,17 +935,144 @@ }, { "cell_type": "code", - "execution_count": 14, - "metadata": {}, + "execution_count": 11, + "metadata": { + "scrolled": true + }, "outputs": [ + { + "data": { + "text/plain": [ + "RainNet(\n", + " (criterion): L1Loss()\n", + " (encoder): Encoder(\n", + " (blocks): ModuleList(\n", + " (0): Block(\n", + " (net): Sequential(\n", + " (0): Conv2d(4, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (1): ReLU(inplace=True)\n", + " (2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (3): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (4): ReLU(inplace=True)\n", + " (5): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, 
track_running_stats=True)\n", + " )\n", + " )\n", + " (1): Block(\n", + " (net): Sequential(\n", + " (0): Conv2d(64, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (1): ReLU(inplace=True)\n", + " (2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (3): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (4): ReLU(inplace=True)\n", + " (5): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " )\n", + " )\n", + " (2): Block(\n", + " (net): Sequential(\n", + " (0): Conv2d(128, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (1): ReLU(inplace=True)\n", + " (2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (3): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (4): ReLU(inplace=True)\n", + " (5): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " )\n", + " )\n", + " (3): Block(\n", + " (net): Sequential(\n", + " (0): Conv2d(256, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (1): ReLU(inplace=True)\n", + " (2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (3): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (4): ReLU(inplace=True)\n", + " (5): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " )\n", + " )\n", + " (4): Block(\n", + " (net): Sequential(\n", + " (0): Conv2d(512, 1024, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (1): ReLU(inplace=True)\n", + " (2): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (3): Conv2d(1024, 1024, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (4): ReLU(inplace=True)\n", + " (5): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " )\n", + " )\n", + " )\n", + " (pool): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)\n", + " (dropout): Dropout(p=0.5, inplace=False)\n", + " )\n", + " (decoder): Decoder(\n", + " (ups): ModuleList(\n", + " (0): Upsample(scale_factor=2.0, mode=nearest)\n", + " (1): Upsample(scale_factor=2.0, mode=nearest)\n", + " (2): Upsample(scale_factor=2.0, mode=nearest)\n", + " (3): Upsample(scale_factor=2.0, mode=nearest)\n", + " )\n", + " (convs): ModuleList(\n", + " (0): Block(\n", + " (net): Sequential(\n", + " (0): Conv2d(1536, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (1): ReLU(inplace=True)\n", + " (2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (3): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (4): ReLU(inplace=True)\n", + " (5): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " )\n", + " )\n", + " (1): Block(\n", + " (net): Sequential(\n", + " (0): Conv2d(768, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (1): ReLU(inplace=True)\n", + " (2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (3): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (4): ReLU(inplace=True)\n", + " (5): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " )\n", + " )\n", 
+ " (2): Block(\n", + " (net): Sequential(\n", + " (0): Conv2d(384, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (1): ReLU(inplace=True)\n", + " (2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (3): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (4): ReLU(inplace=True)\n", + " (5): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " )\n", + " )\n", + " (3): Block(\n", + " (net): Sequential(\n", + " (0): Conv2d(192, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (1): ReLU(inplace=True)\n", + " (2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (3): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (4): ReLU(inplace=True)\n", + " (5): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " )\n", + " )\n", + " )\n", + " )\n", + " (out): Sequential(\n", + " (0): Conv2d(64, 2, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", + " (1): ReLU(inplace=True)\n", + " (2): BatchNorm2d(2, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (3): Conv2d(2, 1, kernel_size=(1, 1), stride=(1, 1))\n", + " (4): ReLU(inplace=True)\n", + " )\n", + ")" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + }, { "data": { "application/javascript": [ "\n", " setTimeout(function() {\n", - " var nbb_cell_id = 14;\n", - " var nbb_unformatted_code = \"model = RainNet.load_from_checkpoint(\\\"rainnet_fold0_bs64_epoch40.ckpt\\\")\";\n", - " var nbb_formatted_code = \"model = RainNet.load_from_checkpoint(\\\"rainnet_fold0_bs64_epoch40.ckpt\\\")\";\n", + " var nbb_cell_id = 11;\n", + " var nbb_unformatted_code = \"model = RainNet.load_from_checkpoint(\\\"rainnet_fold0_bs64_epoch50.ckpt\\\")\\nmodel.to(\\\"cuda\\\")\";\n", + " var nbb_formatted_code = \"model = RainNet.load_from_checkpoint(\\\"rainnet_fold0_bs64_epoch50.ckpt\\\")\\nmodel.to(\\\"cuda\\\")\";\n", " var nbb_cells = Jupyter.notebook.get_cells();\n", " for (var i = 0; i < nbb_cells.length; ++i) {\n", " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", @@ -2308,12 +1094,13 @@ } ], "source": [ - "model = RainNet.load_from_checkpoint(\"rainnet_fold0_bs64_epoch40.ckpt\")" + "model = RainNet.load_from_checkpoint(\"rainnet_fold0_bs64_epoch50.ckpt\")\n", + "model.to(\"cuda\")" ] }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 13, "metadata": {}, "outputs": [ { @@ -2321,9 +1108,9 @@ "application/javascript": [ "\n", " setTimeout(function() {\n", - " var nbb_cell_id = 15;\n", - " var nbb_unformatted_code = \"datamodule = NowcastingDataModule(df, fold=0, batch_size=128)\\ndatamodule.setup(\\\"test\\\")\";\n", - " var nbb_formatted_code = \"datamodule = NowcastingDataModule(df, fold=0, batch_size=128)\\ndatamodule.setup(\\\"test\\\")\";\n", + " var nbb_cell_id = 13;\n", + " var nbb_unformatted_code = \"datamodule = NowcastingDataModule(train_df, val_df, batch_size=2 * args[\\\"batch_size\\\"])\\ndatamodule.setup(\\\"test\\\")\";\n", + " var nbb_formatted_code = \"datamodule = NowcastingDataModule(train_df, val_df, batch_size=2 * args[\\\"batch_size\\\"])\\ndatamodule.setup(\\\"test\\\")\";\n", " var nbb_cells = Jupyter.notebook.get_cells();\n", " for (var i = 0; i < nbb_cells.length; ++i) {\n", " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", @@ -2345,13 +1132,13 @@ } ], "source": [ - 
"datamodule = NowcastingDataModule(df, fold=0, batch_size=128)\n", + "datamodule = NowcastingDataModule(train_df, val_df, batch_size=2 * args[\"batch_size\"])\n", "datamodule.setup(\"test\")" ] }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 14, "metadata": {}, "outputs": [ { @@ -2359,9 +1146,9 @@ "application/javascript": [ "\n", " setTimeout(function() {\n", - " var nbb_cell_id = 16;\n", - " var nbb_unformatted_code = \"preds = []\\nmodel.to(\\\"cuda\\\")\\nmodel.eval()\\nwith torch.no_grad():\\n for batch in datamodule.test_dataloader():\\n batch = batch.to(\\\"cuda\\\")\\n imgs = model(batch)\\n imgs = imgs.detach().cpu().numpy()\\n imgs = imgs[:, 0, 4:124, 4:124]\\n imgs = 255.0 * imgs\\n imgs = np.round(imgs)\\n imgs = np.clip(imgs, 0, 255)\\n preds.append(imgs)\\n\\npreds = np.concatenate(preds)\\npreds = preds.astype(np.uint8)\\npreds = preds.reshape(len(preds), -1)\";\n", - " var nbb_formatted_code = \"preds = []\\nmodel.to(\\\"cuda\\\")\\nmodel.eval()\\nwith torch.no_grad():\\n for batch in datamodule.test_dataloader():\\n batch = batch.to(\\\"cuda\\\")\\n imgs = model(batch)\\n imgs = imgs.detach().cpu().numpy()\\n imgs = imgs[:, 0, 4:124, 4:124]\\n imgs = 255.0 * imgs\\n imgs = np.round(imgs)\\n imgs = np.clip(imgs, 0, 255)\\n preds.append(imgs)\\n\\npreds = np.concatenate(preds)\\npreds = preds.astype(np.uint8)\\npreds = preds.reshape(len(preds), -1)\";\n", + " var nbb_cell_id = 14;\n", + " var nbb_unformatted_code = \"preds = []\\nmodel.eval()\\nwith torch.no_grad():\\n for batch in datamodule.test_dataloader():\\n batch = batch.to(\\\"cuda\\\")\\n imgs = model(batch)\\n imgs = imgs.detach().cpu().numpy()\\n imgs = imgs[:, 0, 4:124, 4:124]\\n imgs = 255.0 * imgs\\n imgs = np.round(imgs)\\n imgs = np.clip(imgs, 0, 255)\\n preds.append(imgs)\\n\\npreds = np.concatenate(preds)\\npreds = preds.astype(np.uint8)\\npreds = preds.reshape(len(preds), -1)\";\n", + " var nbb_formatted_code = \"preds = []\\nmodel.eval()\\nwith torch.no_grad():\\n for batch in datamodule.test_dataloader():\\n batch = batch.to(\\\"cuda\\\")\\n imgs = model(batch)\\n imgs = imgs.detach().cpu().numpy()\\n imgs = imgs[:, 0, 4:124, 4:124]\\n imgs = 255.0 * imgs\\n imgs = np.round(imgs)\\n imgs = np.clip(imgs, 0, 255)\\n preds.append(imgs)\\n\\npreds = np.concatenate(preds)\\npreds = preds.astype(np.uint8)\\npreds = preds.reshape(len(preds), -1)\";\n", " var nbb_cells = Jupyter.notebook.get_cells();\n", " for (var i = 0; i < nbb_cells.length; ++i) {\n", " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", @@ -2384,10 +1171,9 @@ ], "source": [ "preds = []\n", - "model.to(\"cuda\")\n", "model.eval()\n", "with torch.no_grad():\n", - " for batch in datamodule.test_dataloader():\n", + " for batch in tqdm(datamodule.test_dataloader()):\n", " batch = batch.to(\"cuda\")\n", " imgs = model(batch)\n", " imgs = imgs.detach().cpu().numpy()\n", @@ -2404,7 +1190,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 15, "metadata": {}, "outputs": [ { @@ -2412,7 +1198,7 @@ "application/javascript": [ "\n", " setTimeout(function() {\n", - " var nbb_cell_id = 17;\n", + " var nbb_cell_id = 15;\n", " var nbb_unformatted_code = \"test_paths = datamodule.test_dataset.paths\\ntest_filenames = [path.name for path in test_paths]\";\n", " var nbb_formatted_code = \"test_paths = datamodule.test_dataset.paths\\ntest_filenames = [path.name for path in test_paths]\";\n", " var nbb_cells = Jupyter.notebook.get_cells();\n", @@ -2442,13 +1228,13 @@ }, { "cell_type": "code", - 
"execution_count": 18, + "execution_count": 16, "metadata": {}, "outputs": [ { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "8bf0d67ec724490ebd20d0a99253ad11", + "model_id": "67aa02094dbe4d8f8ddc835f0c523658", "version_major": 2, "version_minor": 0 }, @@ -2471,7 +1257,7 @@ "application/javascript": [ "\n", " setTimeout(function() {\n", - " var nbb_cell_id = 18;\n", + " var nbb_cell_id = 16;\n", " var nbb_unformatted_code = \"subm = pd.DataFrame()\\nsubm[\\\"file_name\\\"] = test_filenames\\nfor i in tqdm(range(14400)):\\n subm[str(i)] = preds[:, i]\";\n", " var nbb_formatted_code = \"subm = pd.DataFrame()\\nsubm[\\\"file_name\\\"] = test_filenames\\nfor i in tqdm(range(14400)):\\n subm[str(i)] = preds[:, i]\";\n", " var nbb_cells = Jupyter.notebook.get_cells();\n", @@ -2503,7 +1289,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 17, "metadata": {}, "outputs": [ { @@ -2562,7 +1348,7 @@ " 0\n", " 0\n", " 0\n", - " 0\n", + " 8\n", " ...\n", " 0\n", " 0\n", @@ -2678,7 +1464,7 @@ ], "text/plain": [ " file_name 0 1 2 3 4 5 6 7 8 ... 14390 14391 14392 14393 \\\n", - "0 test_00402.npy 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 \n", + "0 test_00402.npy 0 0 0 0 0 0 0 0 8 ... 0 0 0 0 \n", "1 test_00365.npy 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 \n", "2 test_00122.npy 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 \n", "3 test_01822.npy 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 \n", @@ -2694,7 +1480,7 @@ "[5 rows x 14401 columns]" ] }, - "execution_count": 19, + "execution_count": 17, "metadata": {}, "output_type": "execute_result" }, @@ -2703,9 +1489,9 @@ "application/javascript": [ "\n", " setTimeout(function() {\n", - " var nbb_cell_id = 19;\n", - " var nbb_unformatted_code = \"subm.to_csv(\\\"rainnet_fold0_epoch40.csv\\\", index=False)\\nsubm.head()\";\n", - " var nbb_formatted_code = \"subm.to_csv(\\\"rainnet_fold0_epoch40.csv\\\", index=False)\\nsubm.head()\";\n", + " var nbb_cell_id = 17;\n", + " var nbb_unformatted_code = \"subm.to_csv(\\\"rainnet_fold0_epoch50.csv\\\", index=False)\\nsubm.head()\";\n", + " var nbb_formatted_code = \"subm.to_csv(\\\"rainnet_fold0_epoch50.csv\\\", index=False)\\nsubm.head()\";\n", " var nbb_cells = Jupyter.notebook.get_cells();\n", " for (var i = 0; i < nbb_cells.length; ++i) {\n", " if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n", @@ -2727,7 +1513,7 @@ } ], "source": [ - "subm.to_csv(\"rainnet_fold0_epoch40.csv\", index=False)\n", + "subm.to_csv(\"rainnet_fold0_epoch50.csv\", index=False)\n", "subm.head()" ] }, @@ -2737,9 +1523,7 @@ "metadata": {}, "outputs": [], "source": [ - "# test_paths = list((PATH / \"test-128\").glob(\"*.npy\"))\n", - "# test_dataset = NowcastingDataset(paths, test=True)\n", - "# test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=256, num_workers=4, pin_memory=True)" + "del model" ] }, { @@ -2763,13 +1547,6 @@ "outputs": [], "source": [] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, { "cell_type": "code", "execution_count": null, @@ -2794,7 +1571,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.5" + "version": "3.7.8" } }, "nbformat": 4, diff --git a/src/optim/__init__.py b/src/optim/__init__.py index e69de29..62c6730 100644 --- a/src/optim/__init__.py +++ b/src/optim/__init__.py @@ -0,0 +1,4 @@ +from .adamp import AdamP +from .radam import RAdam +from .lookahead import Lookahead +from .sgdp import SGDP