From f8b62a3f58990b53645967d623fcde209229fa07 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=9D=8E=E5=B0=B9=E7=A1=95?= Date: Tue, 16 Mar 2021 21:28:05 +0800 Subject: [PATCH 1/2] Add model: multilayer perceptrons. --- mmedit/models/components/refiners/__init__.py | 3 +- mmedit/models/components/refiners/mlp.py | 58 +++++++++++++++++++ tests/test_mlp.py | 32 ++++++++++ 3 files changed, 92 insertions(+), 1 deletion(-) create mode 100644 mmedit/models/components/refiners/mlp.py create mode 100644 tests/test_mlp.py diff --git a/mmedit/models/components/refiners/__init__.py b/mmedit/models/components/refiners/__init__.py index 6f089bfbe3..15c1238822 100644 --- a/mmedit/models/components/refiners/__init__.py +++ b/mmedit/models/components/refiners/__init__.py @@ -1,4 +1,5 @@ from .deepfill_refiner import DeepFillRefiner +from .mlp import MLP from .plain_refiner import PlainRefiner -__all__ = ['PlainRefiner', 'DeepFillRefiner'] +__all__ = ['PlainRefiner', 'DeepFillRefiner', 'MLP'] diff --git a/mmedit/models/components/refiners/mlp.py b/mmedit/models/components/refiners/mlp.py new file mode 100644 index 0000000000..cf9172179e --- /dev/null +++ b/mmedit/models/components/refiners/mlp.py @@ -0,0 +1,58 @@ +import torch.nn as nn +from mmcv.runner import load_checkpoint + +from mmedit.models.registry import COMPONENTS +from mmedit.utils import get_root_logger + + +@COMPONENTS.register_module() +class MLP(nn.Module): + """Multilayer perceptrons (MLPs). + + Args: + in_dim (int): Input dimension. + out_dim (int): Output dimension. + hidden_list (list[int]): List of hidden dimensions. + """ + + def __init__(self, in_dim, out_dim, hidden_list): + super().__init__() + layers = [] + lastv = in_dim + for hidden in hidden_list: + layers.append(nn.Linear(lastv, hidden)) + layers.append(nn.ReLU()) + lastv = hidden + layers.append(nn.Linear(lastv, out_dim)) + self.layers = nn.Sequential(*layers) + + def forward(self, x): + """Forward function. + + Args: + x (Tensor): The input of MLP. 
+ + Returns: + Tensor: The output of MLP. + """ + shape = x.shape[:-1] + x = self.layers(x.view(-1, x.shape[-1])) + return x.view(*shape, -1) + + def init_weights(self, pretrained=None, strict=True): + """Init weights for models. + + Args: + pretrained (str, optional): Path for pretrained weights. If given + None, pretrained weights will not be loaded. Defaults to None. + strict (bool, optional): Whether to strictly load the pretrained + model. Defaults to True. + """ + if isinstance(pretrained, str): + logger = get_root_logger() + load_checkpoint(self, pretrained, strict=strict, logger=logger) + elif pretrained is None: + pass + else: + raise TypeError(f'"pretrained" must be a str or None. ' + f'But received {type(pretrained)}.') diff --git a/tests/test_mlp.py b/tests/test_mlp.py new file mode 100644 index 0000000000..3159f9a2bf --- /dev/null +++ b/tests/test_mlp.py @@ -0,0 +1,32 @@ +import torch +import torch.nn as nn + +from mmedit.models.builder import build_component + + +def test_mlp(): + model_cfg = dict(type='MLP', in_dim=8, out_dim=3, hidden_list=[8, 8, 8, 8]) + mlp = build_component(model_cfg) + + # test attributes + assert mlp.__class__.__name__ == 'MLP' + + # prepare data + inputs = torch.rand(2, 8) + targets = torch.rand(2, 3) + if torch.cuda.is_available(): + inputs = inputs.cuda() + targets = targets.cuda() + mlp = mlp.cuda() + data_batch = {'in': inputs, 'target': targets} + # prepare optimizer + criterion = nn.L1Loss() + optimizer = torch.optim.Adam(mlp.parameters(), lr=1e-4) + + # test train_step + output = mlp.forward(data_batch['in']) + assert output.shape == data_batch['target'].shape + loss = criterion(output, data_batch['target']) + optimizer.zero_grad() + loss.backward() + optimizer.step() From bf55d3478d4232973568beeadca119aca59642eb Mon Sep 17 00:00:00 2001 From: liyinshuo Date: Thu, 25 Mar 2021 10:48:07 +0800 Subject: [PATCH 2/2] Rename mlp from 'MLP' to 'MLPRefiner'. 
--- mmedit/models/components/refiners/__init__.py | 4 ++-- .../models/components/refiners/{mlp.py => mlp_refiner.py} | 4 ++-- tests/{test_mlp.py => test_mlp_refiner.py} | 7 ++++--- 3 files changed, 8 insertions(+), 7 deletions(-) rename mmedit/models/components/refiners/{mlp.py => mlp_refiner.py} (95%) rename tests/{test_mlp.py => test_mlp_refiner.py} (81%) diff --git a/mmedit/models/components/refiners/__init__.py b/mmedit/models/components/refiners/__init__.py index 15c1238822..3ddf6f0bdd 100644 --- a/mmedit/models/components/refiners/__init__.py +++ b/mmedit/models/components/refiners/__init__.py @@ -1,5 +1,5 @@ from .deepfill_refiner import DeepFillRefiner -from .mlp import MLP +from .mlp_refiner import MLPRefiner from .plain_refiner import PlainRefiner -__all__ = ['PlainRefiner', 'DeepFillRefiner', 'MLP'] +__all__ = ['PlainRefiner', 'DeepFillRefiner', 'MLPRefiner'] diff --git a/mmedit/models/components/refiners/mlp.py b/mmedit/models/components/refiners/mlp_refiner.py similarity index 95% rename from mmedit/models/components/refiners/mlp.py rename to mmedit/models/components/refiners/mlp_refiner.py index cf9172179e..1966f6b25b 100644 --- a/mmedit/models/components/refiners/mlp.py +++ b/mmedit/models/components/refiners/mlp_refiner.py @@ -6,8 +6,8 @@ @COMPONENTS.register_module() -class MLP(nn.Module): - """Multilayer perceptrons (MLPs). +class MLPRefiner(nn.Module): + """Multilayer perceptrons (MLPs), refiner used in LIIF. Args: in_dim (int): Input dimension. 
diff --git a/tests/test_mlp.py b/tests/test_mlp_refiner.py similarity index 81% rename from tests/test_mlp.py rename to tests/test_mlp_refiner.py index 3159f9a2bf..5c93436fbc 100644 --- a/tests/test_mlp.py +++ b/tests/test_mlp_refiner.py @@ -4,12 +4,13 @@ from mmedit.models.builder import build_component -def test_mlp(): - model_cfg = dict(type='MLP', in_dim=8, out_dim=3, hidden_list=[8, 8, 8, 8]) +def test_mlp_refiner(): + model_cfg = dict( + type='MLPRefiner', in_dim=8, out_dim=3, hidden_list=[8, 8, 8, 8]) mlp = build_component(model_cfg) # test attributes - assert mlp.__class__.__name__ == 'MLP' + assert mlp.__class__.__name__ == 'MLPRefiner' # prepare data inputs = torch.rand(2, 8)