[RELAY][OP] log_softmax op #1857

Merged 1 commit on Oct 8, 2018
2 changes: 2 additions & 0 deletions docs/langref/relay_op.rst
@@ -30,6 +30,7 @@ This level enables fully connected multi-layer perceptron.
tvm.relay.expand_dims
tvm.relay.concatenate
tvm.relay.nn.softmax
tvm.relay.nn.log_softmax
tvm.relay.subtract
tvm.relay.multiply
tvm.relay.divide
@@ -114,6 +115,7 @@ Level 1 Definitions
.. autofunction:: tvm.relay.sigmoid
.. autofunction:: tvm.relay.concatenate
.. autofunction:: tvm.relay.nn.softmax
.. autofunction:: tvm.relay.nn.log_softmax


Level 2 Definitions
22 changes: 22 additions & 0 deletions python/tvm/relay/op/nn/nn.py
@@ -108,6 +108,28 @@ def softmax(data, axis):
    return _make.softmax(data, axis)


def log_softmax(data, axis):
    r"""Computes log softmax.

    .. math::

        \text{log_softmax}(x)_i = \log \frac{\exp(x_i)}{\sum_j \exp(x_j)}

    .. note::
        This operator can be optimized away for inference.

    Parameters
    ----------
    data : relay.Expr
        The input data to the operator.

    axis : int
        The axis to sum over when computing log softmax.

    Returns
    -------
    result : relay.Expr
        The computed result.
    """
    return _make.log_softmax(data, axis)


def max_pool2d(data,
               pool_size=(1, 1),
               strides=(1, 1),
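For context on what the new operator computes, here is a minimal NumPy reference (a reviewer's sketch, not part of this PR; the max subtraction is the usual numerically stable formulation and is not claimed to match TVM's kernel):

import numpy as np

def log_softmax_ref(x, axis=-1):
    """Reference for log_softmax(x)_i = log(exp(x_i) / sum_j exp(x_j)).

    Subtracting the per-axis max before exponentiating avoids
    overflow; the shift cancels out of the final expression.
    """
    shifted = x - np.max(x, axis=axis, keepdims=True)
    return shifted - np.log(np.sum(np.exp(shifted), axis=axis, keepdims=True))

x = np.random.randn(2, 5).astype("float32")
out = log_softmax_ref(x, axis=1)
# exp(log_softmax) recovers softmax, so each row sums to 1.
assert np.allclose(np.exp(out).sum(axis=1), 1.0, atol=1e-5)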
29 changes: 29 additions & 0 deletions src/relay/op/nn/nn.cc
@@ -41,6 +41,35 @@ RELAY_REGISTER_OP("nn.softmax")
.set_support_level(1)
.add_type_rel("Identity", IdentityRel);


TVM_REGISTER_API("relay.op.nn._make.log_softmax")
.set_body([](const TVMArgs& args, TVMRetValue* rv) {
    auto make_func = [](Expr data, int axis) {
      auto attrs = make_node<SoftmaxAttrs>();
      attrs->axis = axis;
      static const Op& op = Op::Get("nn.log_softmax");
      return CallNode::make(op, {data}, Attrs(attrs), {});
    };
    runtime::detail::unpack_call<Expr, 2>(make_func, args, rv);
  });

RELAY_REGISTER_OP("nn.log_softmax")
.describe(R"code(Computes log softmax.

.. math:: \text{log_softmax}(x)_i = \log \frac{\exp(x_i)}{\sum_j \exp(x_j)}

.. note::
This operator can be optimized away for inference.

- **data**: The input data
)code" TVM_ADD_FILELINE)
.set_num_inputs(1)
.add_argument("data", "Tensor", "The input tensor.")
.set_support_level(1)
.add_type_rel("Identity", IdentityRel);


// BatchFlatten
bool BatchFlattenRel(const Array<Type>& types,
                     int num_inputs,
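A note on the registration above: the string passed to TVM_REGISTER_API names a global PackedFunc, which the Python stub _make.log_softmax resolves through TVM's FFI. A hedged sketch of that lookup (assuming a TVM build of this vintage is importable; the printed repr will vary):

import tvm

# The C++ lambda registered as "relay.op.nn._make.log_softmax"
# is reachable as a global PackedFunc; relay.op.nn._make is a
# thin Python wrapper around lookups like this one.
f = tvm.get_global_func("relay.op.nn._make.log_softmax")
print(f)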
13 changes: 13 additions & 0 deletions tests/python/relay/test_op_level1.py
@@ -54,6 +54,18 @@ def test_softmax():
    assert ftype.ret_type == relay.ty.TensorType((n, d), "float32")


def test_log_softmax():
    ib = relay.ir_builder.IRBuilder()
    n, d = tvm.var("n"), tvm.var("d")
    x = ib.param("x", relay.ty.TensorType((n, d), "float32"))
    with ib.function(x) as func:
        ib.ret(relay.nn.log_softmax(x, axis=1))
    ib.ret(func)

    func = relay.ir_pass.infer_type(ib.env, func.to_func())
    ftype = func.checked_type
    assert ftype.ret_type == relay.ty.TensorType((n, d), "float32")


def test_unary_op():
    for op in [relay.exp,
               relay.log,
@@ -162,5 +174,6 @@ def test_concatenate_infer_type():
    test_expand_dims_infer_type()
    test_concatenate_infer_type()
    test_softmax()
    test_log_softmax()
    test_binary_op()
    test_binary_broadcast_op()
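One last sanity note on the docstring's "optimized away for inference" remark: softmax (and hence log softmax) is a monotone transform along the reduced axis, so the argmax taken at inference time is unchanged if the op is dropped. A NumPy sketch of that invariance (illustrative only, independent of TVM):

import numpy as np

def softmax_ref(x, axis=-1):
    shifted = x - np.max(x, axis=axis, keepdims=True)
    e = np.exp(shifted)
    return e / e.sum(axis=axis, keepdims=True)

x = np.random.randn(3, 7).astype("float32")
# The predicted class is the same with or without the op,
# which is why it can be elided for inference-only graphs.
assert (np.argmax(softmax_ref(x, axis=1), axis=1)
        == np.argmax(x, axis=1)).all()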