[PYTORCH]Activations for pytorch
siju-samuel committed Mar 31, 2020
1 parent 430cb89 commit 2fda4d6
Showing 2 changed files with 82 additions and 0 deletions.
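
In short, this commit adds Relay converters for four PyTorch activations (aten::prelu, aten::leaky_relu, aten::elu, aten::log_sigmoid) plus a forward test for each. As a hedged sketch of what this enables, not code from the commit itself, a traced model using one of these activations can now be imported roughly like this (the input name "input0" and the exact from_pytorch shape-list format are assumptions and may differ across TVM versions):

import torch
from tvm import relay

# Tiny model ending in one of the newly supported activations.
model = torch.nn.Sequential(torch.nn.Linear(8, 8), torch.nn.ELU(alpha=1.0)).eval()
inp = torch.rand(1, 8)
scripted = torch.jit.trace(model, inp)
# Pair each graph input name with its shape; "input0" is illustrative.
mod, params = relay.frontend.from_pytorch(scripted, [("input0", (1, 8))])
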
31 changes: 31 additions & 0 deletions python/tvm/relay/frontend/pytorch.py
@@ -193,6 +193,33 @@ def _impl(inputs, input_types):
        return _op.nn.relu(data)
    return _impl

def _prelu():
    def _impl(inputs, input_types):
        data = inputs[0]
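        # inputs[1] is the learned negative-slope parameter of torch.nn.PReLU,
        # already a Relay expression here (one value per channel in the test below).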
        alpha = inputs[1]
        return _op.nn.prelu(data, alpha)
    return _impl

def _leaky_relu():
    def _impl(inputs, input_types):
        data = inputs[0]
        # negative_slope is a fractional scalar (0.05 in the test below);
        # casting it to int would truncate it to 0, so keep it as a float.
        alpha = float(inputs[1])
        return _op.nn.leaky_relu(data, alpha)
    return _impl

def _elu():
    def _impl(inputs, input_types):
        data = inputs[0]
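        # ELU(x) = x for x > 0 and alpha * (exp(x) - 1) for x <= 0, which can be
        # rewritten with relu as relu(x) - alpha * relu(1 - exp(x)).
        # inputs[1] is the fractional alpha scalar (1.3 in the test below), so it
        # must not be truncated to int; the minus sign is folded into the constant
        # so the expression below matches the rewritten form.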
        alpha = _expr.const(-float(inputs[1]), dtype='float32')
        return alpha * _op.nn.relu(_expr.const(1.0, dtype='float32') - _op.exp(data)) + _op.nn.relu(data)
    return _impl

def _log_sigmoid():
    def _impl(inputs, input_types):
        data = inputs[0]
        return _op.log(_op.tensor.sigmoid(data))
    return _impl

def _adaptive_avg_pool_2d():
    def _impl(inputs, input_types):
        data = inputs[0]
@@ -921,6 +948,10 @@ def _wrap_const(c):
"aten::select" : _select(),
"aten::relu" : _relu(),
"aten::relu_" : _relu(),
"aten::prelu" : _prelu(),
"aten::leaky_relu" : _leaky_relu(),
"aten::elu" : _elu(),
"aten::log_sigmoid" : _log_sigmoid(),
"aten::adaptive_avg_pool2d" : _adaptive_avg_pool_2d(),
"aten::adaptive_max_pool2d" : _adaptive_max_pool_2d(),
"aten::max_pool2d" : _maxpool_2d(),
51 changes: 51 additions & 0 deletions tests/python/frontend/pytorch/test_forward.py
@@ -335,6 +335,53 @@ def forward(self, *args):
    input_data = torch.rand(input_shape).float()
    verify_model(ReLU1().float().eval(), input_data=input_data)

def test_forward_prelu():
    torch.set_grad_enabled(False)
    input_shape = [1, 3, 10, 10]

    class PReLU1(Module):
        def __init__(self):
            super(PReLU1, self).__init__()
            self.prelu = torch.nn.PReLU(num_parameters=3)
        def forward(self, *args):
            return self.prelu(args[0])

    input_data = torch.rand(input_shape).float()
    verify_model(PReLU1().float().eval(), input_data=input_data)

def test_forward_leakyrelu():
    torch.set_grad_enabled(False)
    input_shape = [10, 10]

    class LeakyReLU1(Module):
        def forward(self, *args):
            return torch.nn.LeakyReLU(negative_slope=0.05)(args[0])

    input_data = torch.rand(input_shape).float()
    verify_model(LeakyReLU1().float().eval(), input_data=input_data)

def test_forward_elu():
    torch.set_grad_enabled(False)
    input_shape = [10, 10]

    class ELU1(Module):
        def forward(self, *args):
            return torch.nn.ELU(alpha=1.3)(args[0])

    input_data = torch.rand(input_shape).float()
    verify_model(ELU1().float().eval(), input_data=input_data)

def test_forward_log_sigmoid():
    torch.set_grad_enabled(False)
    input_shape = [10, 10]

    class LogSigmoid1(Module):
        def forward(self, *args):
            return torch.nn.LogSigmoid()(args[0])

    input_data = torch.rand(input_shape).float()
    verify_model(LogSigmoid1().float().eval(), input_data=input_data)

def test_forward_adaptiveavgpool():
    torch.set_grad_enabled(False)
    input_shape = [1, 3, 10, 10]
@@ -1076,6 +1123,10 @@ def forward(self, xs):
    test_forward_unsqueeze()
    test_forward_concatenate()
    test_forward_relu()
    test_forward_prelu()
    test_forward_leakyrelu()
    test_forward_elu()
    test_forward_log_sigmoid()
    test_forward_adaptiveavgpool()
    test_forward_maxpool2d()
    test_forward_maxpool1d()
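
All four new tests go through verify_model, which is defined earlier in test_forward.py and is not shown in this diff. As a rough, hedged sketch of the pattern such a helper follows (the helper name, input name, build API, and tolerances below are illustrative assumptions, not the file's actual code), each test amounts to:

import numpy as np
import torch
import tvm
from tvm import relay
from tvm.contrib import graph_runtime

def verify_model_sketch(model, input_data, rtol=1e-5, atol=1e-5):
    # Reference output from eager PyTorch.
    baseline = model(input_data).detach().numpy()
    # TorchScript graph that the Relay frontend consumes.
    scripted = torch.jit.trace(model, input_data).eval()
    shape_list = [("input0", tuple(input_data.shape))]  # input name is illustrative
    mod, params = relay.frontend.from_pytorch(scripted, shape_list)
    # Compile for CPU and run through the graph runtime.
    with relay.build_config(opt_level=3):
        graph, lib, params = relay.build(mod, target="llvm", params=params)
    runtime = graph_runtime.create(graph, lib, tvm.cpu(0))
    runtime.set_input(**params)
    runtime.set_input("input0", tvm.nd.array(input_data.numpy()))
    runtime.run()
    # TVM and PyTorch outputs should agree within floating-point tolerance.
    np.testing.assert_allclose(runtime.get_output(0).asnumpy(), baseline,
                               rtol=rtol, atol=atol)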
