From 07c01664bb2627c9257439e39855000c63c5720d Mon Sep 17 00:00:00 2001 From: Siva Date: Tue, 22 May 2018 22:57:40 +0530 Subject: [PATCH 1/3] Cleanup of '-Wsign-compare' warnings. (#504) --- src/top/tensor/matrix_op.cc | 4 ++-- src/top/tensor/reduce.cc | 2 +- src/top/tensor/transform.cc | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/top/tensor/matrix_op.cc b/src/top/tensor/matrix_op.cc index 760896733..87aae9b2d 100644 --- a/src/top/tensor/matrix_op.cc +++ b/src/top/tensor/matrix_op.cc @@ -34,8 +34,8 @@ inline bool DotShape(const nnvm::NodeAttrs& attrs, << "dot shape inconsistent: " << lshape << " X " << rshape; TShape oshape(lshape.ndim() + rshape.ndim() - 2); - for (int i = 0; i < lshape.ndim() - 1; i++) oshape[i] = lshape[i]; - for (int i = 1; i < rshape.ndim(); i++) oshape[i + lshape.ndim() - 2] = rshape[i]; + for (uint32_t i = 0; i < lshape.ndim() - 1; i++) oshape[i] = lshape[i]; + for (uint32_t i = 1; i < rshape.ndim(); i++) oshape[i + lshape.ndim() - 2] = rshape[i]; NNVM_ASSIGN_OUTPUT_SHAPE(attrs, *out_attrs, 0, oshape); return true; diff --git a/src/top/tensor/reduce.cc b/src/top/tensor/reduce.cc index 00f14f52b..2befe1cab 100644 --- a/src/top/tensor/reduce.cc +++ b/src/top/tensor/reduce.cc @@ -145,7 +145,7 @@ Example:: for (dim_t i = 0; i < param.axis.ndim(); ++i) { exclude_axis.insert(param.axis[i]); } - for (dim_t i = 0; i < inputs[0].ndim(); ++i) { + for (dim_t i = 0; i < static_cast<dim_t>(inputs[0].ndim()); ++i) { if (exclude_axis.count(i) == 0) { axis.push_back(make_const(Int(32), i)); } diff --git a/src/top/tensor/transform.cc b/src/top/tensor/transform.cc index 6957e7175..f4e5a7be4 100644 --- a/src/top/tensor/transform.cc +++ b/src/top/tensor/transform.cc @@ -760,7 +760,7 @@ inline bool TransposeCorrectLayout(const NodeAttrs& attrs, } else { CHECK_EQ(input.ndim(), param.axes.ndim()); for (size_t i = 0; i < input.ndim(); ++i) { - CHECK(param.axes[i] < input.ndim()); + CHECK(param.axes[i] < static_cast<dim_t>(input.ndim())); new_layout << 
input.at(param.axes[i]); } } From fce960c42f5e7eb95abf354cd0286a07af865ef0 Mon Sep 17 00:00:00 2001 From: Siva Date: Tue, 22 May 2018 23:00:53 +0530 Subject: [PATCH 2/3] Increase depfiles lookup. (#509) Not able to link build/src/top/tensor/*.d. Hence don't compile nnvm for a change in tvm/topi headers. --- Makefile | 1 + 1 file changed, 1 insertion(+) diff --git a/Makefile b/Makefile index 62a4fadad..8055a7e55 100644 --- a/Makefile +++ b/Makefile @@ -104,3 +104,4 @@ clean: -include build/*.d -include build/*/*.d -include build/*/*/*.d +-include build/*/*/*/*.d From 0e00ca3b81d1317bb47dfc75a9ef3ec4684293f9 Mon Sep 17 00:00:00 2001 From: MORITA Kazutaka Date: Wed, 23 May 2018 02:32:04 +0900 Subject: [PATCH 3/3] [FRONTEND][Keras] Fix softmax axis (#503) --- python/nnvm/frontend/keras.py | 2 +- tests/python/frontend/keras/test_forward.py | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/python/nnvm/frontend/keras.py b/python/nnvm/frontend/keras.py index a4d43cd43..0d51487c3 100644 --- a/python/nnvm/frontend/keras.py +++ b/python/nnvm/frontend/keras.py @@ -40,7 +40,7 @@ def _convert_activation(insym, keras_layer, _): return _sym.__add_scalar__(_sym.__mul_scalar__(insym, \ scalar=alpha), scalar=beta) elif act_type == 'softmax': - return _sym.softmax(insym) + return _sym.softmax(insym, axis=1) elif act_type == 'sigmoid': return _sym.sigmoid(insym) elif act_type == 'tanh': diff --git a/tests/python/frontend/keras/test_forward.py b/tests/python/frontend/keras/test_forward.py index 58a3d8c12..0147a3e2c 100644 --- a/tests/python/frontend/keras/test_forward.py +++ b/tests/python/frontend/keras/test_forward.py @@ -59,6 +59,15 @@ def test_forward_elemwise_add(): verify_keras_frontend(keras_model) +def test_forward_softmax(): + data = keras.layers.Input(shape=(32,32,3)) + x = keras.layers.Activation('softmax')(data) + x = keras.layers.Concatenate()([x, x]) + x = keras.layers.GlobalMaxPooling2D()(x) + keras_model = keras.models.Model(data, x) + 
verify_keras_frontend(keras_model) + + def test_forward_softrelu(): data = keras.layers.Input(shape=(32,32,3)) x = keras.layers.Activation('softplus')(data) @@ -145,6 +154,7 @@ def test_forward_resnet50(): if __name__ == '__main__': test_forward_elemwise_add() + test_forward_softmax() test_forward_softrelu() test_forward_leaky_relu() test_forward_dense()