[RELAY][OP]log_softmax op (apache#1857)
siju-samuel authored and tqchen committed Oct 8, 2018
1 parent 4fd3604 commit fe035db
Showing 4 changed files with 66 additions and 0 deletions.
2 changes: 2 additions & 0 deletions docs/langref/relay_op.rst
@@ -30,6 +30,7 @@ This level enables fully connected multi-layer perceptron.
   tvm.relay.expand_dims
   tvm.relay.concatenate
   tvm.relay.nn.softmax
   tvm.relay.nn.log_softmax
   tvm.relay.subtract
   tvm.relay.multiply
   tvm.relay.divide
@@ -114,6 +115,7 @@ Level 1 Definitions
.. autofunction:: tvm.relay.sigmoid
.. autofunction:: tvm.relay.concatenate
.. autofunction:: tvm.relay.nn.softmax
.. autofunction:: tvm.relay.nn.log_softmax


Level 2 Definitions
22 changes: 22 additions & 0 deletions python/tvm/relay/op/nn/nn.py
@@ -108,6 +108,28 @@ def softmax(data, axis):
    return _make.softmax(data, axis)


def log_softmax(data, axis):
    r"""Computes log softmax.

    .. math::

        \text{log_softmax}(x)_i = \log \frac{\exp(x_i)}{\sum_j \exp(x_j)}

    .. note::
        This operator can be optimized away for inference.

    Parameters
    ----------
    data: relay.Expr
        The input data to the operator.

    axis: int
        The axis to sum over when computing log softmax.

    Returns
    -------
    result: relay.Expr
        The computed result.
    """
    return _make.log_softmax(data, axis)


def max_pool2d(data,
               pool_size=(1, 1),
               strides=(1, 1),
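For reference, the formula in the docstring reduces to x - log(sum(exp(x))) along the chosen axis. Below is a minimal NumPy sketch of that reference behavior, illustrative only and not part of this change; subtracting the per-axis max is the usual numerical-stability trick, and whatever kernel backs this op may be implemented differently.

import numpy as np

def log_softmax_ref(x, axis=-1):
    # Subtract the per-axis max before exponentiating so exp() cannot overflow;
    # the shift cancels out, so the result still equals log(softmax(x)).
    x_max = np.max(x, axis=axis, keepdims=True)
    shifted = x - x_max
    return shifted - np.log(np.sum(np.exp(shifted), axis=axis, keepdims=True))

print(log_softmax_ref(np.array([[1.0, 2.0, 3.0]]), axis=1))
# approximately [[-2.4076, -1.4076, -0.4076]]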
29 changes: 29 additions & 0 deletions src/relay/op/nn/nn.cc
@@ -41,6 +41,35 @@ RELAY_REGISTER_OP("nn.softmax")
.set_support_level(1)
.add_type_rel("Identity", IdentityRel);


TVM_REGISTER_API("relay.op.nn._make.log_softmax")
.set_body([](const TVMArgs& args, TVMRetValue* rv) {
    auto make_func = [](Expr data, int axis) {
      auto attrs = make_node<SoftmaxAttrs>();
      attrs->axis = axis;
      static const Op& op = Op::Get("nn.log_softmax");
      return CallNode::make(op, {data}, Attrs(attrs), {});
    };

    runtime::detail::unpack_call<Expr, 2>(make_func, args, rv);
  });

RELAY_REGISTER_OP("nn.log_softmax")
.describe(R"code(Computes log softmax.

.. math:: \text{log_softmax}(x)_i = \log \frac{\exp(x_i)}{\sum_j \exp(x_j)}

.. note::
    This operator can be optimized away for inference.

- **data**: The input data

)code" TVM_ADD_FILELINE)
.set_num_inputs(1)
.add_argument("data", "Tensor", "The input tensor.")
.set_support_level(1)
.add_type_rel("Identity", IdentityRel);


// BatchFlatten
bool BatchFlattenRel(const Array<Type>& types,
                     int num_inputs,
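The registration above does two things: the TVM_REGISTER_API entry named "relay.op.nn._make.log_softmax" is the packed function that _make.log_softmax in nn.py resolves to, and RELAY_REGISTER_OP records the operator itself with the Identity type relation, so the inferred output type always matches the input type. A quick way to confirm the registration from Python is sketched below; this assumes a TVM build containing this commit, and the attribute names follow the fields of the Op node.

from tvm import relay

# Look up the operator in the global registry and inspect what was registered.
op = relay.op.get("nn.log_softmax")
print(op.name)           # nn.log_softmax
print(op.num_inputs)     # 1
print(op.support_level)  # 1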
13 changes: 13 additions & 0 deletions tests/python/relay/test_op_level1.py
@@ -54,6 +54,18 @@ def test_softmax():
    assert ftype.ret_type == relay.ty.TensorType((n, d), "float32")


def test_log_softmax():
    ib = relay.ir_builder.IRBuilder()
    n, d = tvm.var("n"), tvm.var("d")
    x = ib.param("x", relay.ty.TensorType((n, d), "float32"))
    with ib.function(x) as func:
        ib.ret(relay.nn.log_softmax(x, axis=1))
    ib.ret(func)

    func = relay.ir_pass.infer_type(ib.env, func.to_func())
    ftype = func.checked_type
    assert ftype.ret_type == relay.ty.TensorType((n, d), "float32")

def test_unary_op():
    for op in [relay.exp,
               relay.log,
@@ -162,5 +174,6 @@ def test_concatenate_infer_type():
    test_expand_dims_infer_type()
    test_concatenate_infer_type()
    test_softmax()
    test_log_softmax()
    test_binary_op()
    test_binary_broadcast_op()
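A possible follow-up, not part of this commit, is to repeat the check with a concrete input shape so the inferred return type is fully static. The sketch below reuses the exact ir_builder pattern from test_log_softmax above and assumes the same API surface.

def test_log_softmax_static_shape():
    # Same structure as test_log_softmax, but with a fixed (4, 10) input shape.
    ib = relay.ir_builder.IRBuilder()
    x = ib.param("x", relay.ty.TensorType((4, 10), "float32"))
    with ib.function(x) as func:
        ib.ret(relay.nn.log_softmax(x, axis=1))
    ib.ret(func)

    func = relay.ir_pass.infer_type(ib.env, func.to_func())
    ftype = func.checked_type
    assert ftype.ret_type == relay.ty.TensorType((4, 10), "float32")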
