
Commit

Merge branch 'master' into master
srkreddy1238 authored May 23, 2018
2 parents 35b5a18 + 0e00ca3 commit a7788cb
Showing 6 changed files with 16 additions and 5 deletions.
1 change: 1 addition & 0 deletions Makefile
@@ -104,3 +104,4 @@ clean:
 -include build/*.d
 -include build/*/*.d
 -include build/*/*/*.d
+-include build/*/*/*/*.d
2 changes: 1 addition & 1 deletion python/nnvm/frontend/keras.py
@@ -40,7 +40,7 @@ def _convert_activation(insym, keras_layer, _):
         return _sym.__add_scalar__(_sym.__mul_scalar__(insym, \
             scalar=alpha), scalar=beta)
     elif act_type == 'softmax':
-        return _sym.softmax(insym)
+        return _sym.softmax(insym, axis=1)
     elif act_type == 'sigmoid':
         return _sym.sigmoid(insym)
     elif act_type == 'tanh':
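The axis=1 argument is the functional change in this commit: the NNVM Keras frontend emits graphs in NCHW layout, so the axis that Keras' default softmax normalizes over (the last axis of channels_last data) becomes axis 1 after conversion, whereas NNVM's softmax would otherwise fall back to the last axis. Below is a minimal numpy sketch of why the axis matters for 4D activations; the softmax helper is illustrative only, not the NNVM implementation.

import numpy as np

def softmax(x, axis):
    # Numerically stable softmax along one axis (illustrative helper).
    e = np.exp(x - x.max(axis=axis, keepdims=True))
    return e / e.sum(axis=axis, keepdims=True)

x = np.random.rand(1, 3, 32, 32)        # NCHW activation: channels sit at axis 1
over_channels = softmax(x, axis=1)      # what Keras' channels_last softmax means after the layout conversion
over_width = softmax(x, axis=-1)        # what a default last-axis softmax would compute instead
print(np.allclose(over_channels, over_width))   # False in general: the two normalize different slices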
4 changes: 2 additions & 2 deletions src/top/tensor/matrix_op.cc
@@ -34,8 +34,8 @@ inline bool DotShape(const nnvm::NodeAttrs& attrs,
       << "dot shape inconsistent: " << lshape << " X " << rshape;
 
   TShape oshape(lshape.ndim() + rshape.ndim() - 2);
-  for (int i = 0; i < lshape.ndim() - 1; i++) oshape[i] = lshape[i];
-  for (int i = 1; i < rshape.ndim(); i++) oshape[i + lshape.ndim() - 2] = rshape[i];
+  for (uint32_t i = 0; i < lshape.ndim() - 1; i++) oshape[i] = lshape[i];
+  for (uint32_t i = 1; i < rshape.ndim(); i++) oshape[i + lshape.ndim() - 2] = rshape[i];
 
   NNVM_ASSIGN_OUTPUT_SHAPE(attrs, *out_attrs, 0, oshape);
   return true;
2 changes: 1 addition & 1 deletion src/top/tensor/reduce.cc
@@ -145,7 +145,7 @@ Example::
   for (dim_t i = 0; i < param.axis.ndim(); ++i) {
     exclude_axis.insert(param.axis[i]);
   }
-  for (dim_t i = 0; i < inputs[0].ndim(); ++i) {
+  for (dim_t i = 0; i < static_cast<int>(inputs[0].ndim()); ++i) {
     if (exclude_axis.count(i) == 0) {
       axis.push_back(make_const(Int(32), i));
     }
2 changes: 1 addition & 1 deletion src/top/tensor/transform.cc
@@ -760,7 +760,7 @@ inline bool TransposeCorrectLayout(const NodeAttrs& attrs,
   } else {
     CHECK_EQ(input.ndim(), param.axes.ndim());
     for (size_t i = 0; i < input.ndim(); ++i) {
-      CHECK(param.axes[i] < input.ndim());
+      CHECK(param.axes[i] < static_cast<int>(input.ndim()));
       new_layout << input.at(param.axes[i]);
     }
   }
10 changes: 10 additions & 0 deletions tests/python/frontend/keras/test_forward.py
@@ -59,6 +59,15 @@ def test_forward_elemwise_add():
     verify_keras_frontend(keras_model)
 
 
+def test_forward_softmax():
+    data = keras.layers.Input(shape=(32,32,3))
+    x = keras.layers.Activation('softmax')(data)
+    x = keras.layers.Concatenate()([x, x])
+    x = keras.layers.GlobalMaxPooling2D()(x)
+    keras_model = keras.models.Model(data, x)
+    verify_keras_frontend(keras_model)
+
+
 def test_forward_softrelu():
     data = keras.layers.Input(shape=(32,32,3))
     x = keras.layers.Activation('softplus')(data)
@@ -145,6 +154,7 @@ def test_forward_resnet50():
 
 if __name__ == '__main__':
     test_forward_elemwise_add()
+    test_forward_softmax()
     test_forward_softrelu()
     test_forward_leaky_relu()
     test_forward_dense()
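For reference, the new test exercises the whole small model, not just the activation. The following Keras-only sketch simply re-runs the test's model definition on a channels_last backend to show the reference output the frontend comparison would be made against; it is not part of the test suite.

import numpy as np
import keras

# Same layers as test_forward_softmax: softmax over the channel axis, duplicate
# the channels via Concatenate, then take the spatial max per channel.
data = keras.layers.Input(shape=(32, 32, 3))
x = keras.layers.Activation('softmax')(data)
x = keras.layers.Concatenate()([x, x])
x = keras.layers.GlobalMaxPooling2D()(x)
model = keras.models.Model(data, x)

sample = np.random.rand(1, 32, 32, 3).astype('float32')
print(model.predict(sample).shape)   # (1, 6): 3 softmax channels, concatenated twice, max-pooled over H and W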
