Adds basic layers (#80)
* adds layers

* updates

* super init

* small modification to MLP
oke-aditya authored Dec 10, 2020
1 parent c574986 commit 4a03d9c
Showing 5 changed files with 82 additions and 0 deletions.
3 changes: 3 additions & 0 deletions quickvision/layers/__init__.py
@@ -0,0 +1,3 @@
from quickvision.layers import functional
from quickvision.layers.act_mish import Mish
from quickvision.layers.block_mlp import MLP
28 changes: 28 additions & 0 deletions quickvision/layers/act_mish.py
@@ -0,0 +1,28 @@
import torch
import torch.nn as nn
import torch.nn.functional as F
from quickvision.layers.functional import mish

__all__ = ["Mish"]


class Mish(nn.Module):
    """
    Applies the mish function element-wise:
    mish(x) = x * tanh(softplus(x)) = x * tanh(ln(1 + exp(x)))
    Shape:
        - Input: (N, *) where * means any number of additional
          dimensions
        - Output: (N, *), same shape as the input
    Examples:
        >>> m = Mish()
        >>> input = torch.randn(2)
        >>> output = m(input)
    """

    def __init__(self, inplace: bool = False):
        super().__init__()
        self.inplace = inplace

    def forward(self, x):
        return mish(x, inplace=self.inplace)
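
Because Mish subclasses nn.Module, it can be dropped into a model anywhere an activation such as nn.ReLU would go. A minimal usage sketch (the layer sizes are illustrative, assuming the package is importable as quickvision.layers):

import torch
import torch.nn as nn
from quickvision.layers import Mish

# Mish slots in wherever an activation module would go.
model = nn.Sequential(nn.Linear(16, 32), Mish(), nn.Linear(32, 10))
out = model(torch.randn(4, 16))
print(out.shape)  # torch.Size([4, 10])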
30 changes: 30 additions & 0 deletions quickvision/layers/block_mlp.py
@@ -0,0 +1,30 @@
import torch
import torch.nn as nn
import torch.nn.functional as F


__all__ = ["MLP"]


class MLP(nn.Module):
    """
    A very simple multi-layer perceptron (MLP) classifier.
    Args:
        in_features (int): Number of input features of the network.
        hidden_features (int): Number of intermediate (hidden) features.
        proj_features (int): Number of output (projection) features.
    """

    def __init__(self, in_features: int, hidden_features: int, proj_features: int):
        super().__init__()
        self.in_features = in_features
        self.proj_features = proj_features
        self.hidden_features = hidden_features
        self.l1 = nn.Linear(self.in_features, self.hidden_features)
        self.l2 = nn.Linear(self.hidden_features, self.proj_features)

    def forward(self, x):
        x = self.l1(x)
        x = F.relu(x)  # single hidden non-linearity
        x = self.l2(x)
        return x
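
A minimal usage sketch for the MLP block (the feature sizes here are illustrative, not taken from the commit):

import torch
from quickvision.layers import MLP

# 128 -> 64 -> 10: Linear, ReLU, Linear as defined in forward().
mlp = MLP(in_features=128, hidden_features=64, proj_features=10)
logits = mlp(torch.randn(8, 128))  # batch of 8 feature vectors
print(logits.shape)  # torch.Size([8, 10])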
1 change: 1 addition & 0 deletions quickvision/layers/functional/__init__.py
@@ -0,0 +1 @@
from quickvision.layers.functional.act_mish import mish
20 changes: 20 additions & 0 deletions quickvision/layers/functional/act_mish.py
@@ -0,0 +1,20 @@
# Adapted from the Mish author's reference implementation: https://github.com/digantamisra98/Mish/

import torch
import torch.nn as nn
import torch.nn.functional as F

__all__ = ["mish"]


@torch.jit.script
def mish(x, inplace: bool = False):
    """
    Applies the mish function element-wise:
    Mish: A Self Regularized Non-Monotonic Neural Activation Function - https://arxiv.org/abs/1908.08681
    mish(x) = x * tanh(softplus(x)) = x * tanh(ln(1 + exp(x)))
    """
    if inplace:
        return x.mul_(torch.tanh(F.softplus(x)))  # in-place variant
    else:
        return x.mul(torch.tanh(F.softplus(x)))
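
The functional form can be checked directly against the definition mish(x) = x * tanh(softplus(x)); a small sketch (assuming the import path added in functional/__init__.py above):

import torch
import torch.nn.functional as F
from quickvision.layers.functional import mish

x = torch.randn(5)
expected = x * torch.tanh(F.softplus(x))
print(torch.allclose(mish(x), expected))  # True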
