From d1d4ea5044746490e83121b2bf582f74f359a882 Mon Sep 17 00:00:00 2001
From: Przemyslaw Tredak
Date: Tue, 25 Jul 2017 12:23:52 -0700
Subject: [PATCH 1/4] Set default value for dtype for alexnet (#7193)

---
 example/image-classification/symbols/alexnet.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/example/image-classification/symbols/alexnet.py b/example/image-classification/symbols/alexnet.py
index 2534797a9eba..e2b512b195c8 100755
--- a/example/image-classification/symbols/alexnet.py
+++ b/example/image-classification/symbols/alexnet.py
@@ -6,7 +6,7 @@
 import mxnet as mx
 import numpy as np
 
-def get_symbol(num_classes, dtype, **kwargs):
+def get_symbol(num_classes, dtype='float32', **kwargs):
     input_data = mx.sym.Variable(name="data")
     if dtype == 'float16':
         input_data = mx.sym.Cast(data=input_data, dtype=np.float16)
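
With [PATCH 1/4], dtype defaults to 'float32', so a call that passes only num_classes no longer fails with a missing-argument TypeError. A minimal usage sketch (assuming the example/image-classification/symbols directory is on the Python path; the num_classes value is only illustrative):

import alexnet  # example/image-classification/symbols/alexnet.py

sym = alexnet.get_symbol(num_classes=1000)                         # dtype now defaults to 'float32'
sym_fp16 = alexnet.get_symbol(num_classes=1000, dtype='float16')   # explicit dtype still supported
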
From f3f8a9957c6d63947bbd5f7c8e209f99ba58a99c Mon Sep 17 00:00:00 2001
From: Eric Junyuan Xie
Date: Wed, 26 Jul 2017 01:26:39 -0700
Subject: [PATCH 2/4] fix ndarray setitem (#7195)

* fix ndarray setitem

* fix

* fix

* fix
---
 python/mxnet/gluon/data/sampler.py       |  2 +-
 python/mxnet/ndarray.py                  | 31 +++++++++++++++++++-----
 tests/python/unittest/test_gluon_data.py |  2 ++
 tests/python/unittest/test_ndarray.py    |  9 +++++++
 4 files changed, 37 insertions(+), 7 deletions(-)

diff --git a/python/mxnet/gluon/data/sampler.py b/python/mxnet/gluon/data/sampler.py
index 7bfc418399f5..f6cedf051727 100644
--- a/python/mxnet/gluon/data/sampler.py
+++ b/python/mxnet/gluon/data/sampler.py
@@ -47,7 +47,7 @@ def __init__(self, length):
         self._length = length
 
     def __iter__(self):
-        indices = range(self._length)
+        indices = list(range(self._length))
         random.shuffle(indices)
         return iter(indices)
 
diff --git a/python/mxnet/ndarray.py b/python/mxnet/ndarray.py
index dff4889749c0..d02119166562 100644
--- a/python/mxnet/ndarray.py
+++ b/python/mxnet/ndarray.py
@@ -377,11 +377,13 @@ def __setitem__(self, key, value):
                     len(key), len(my_shape))
             begin = [0 for _ in my_shape]
             end = [x for x in my_shape]
+            expand = []
             for i, slice_i in enumerate(key):
                 if isinstance(slice_i, integer_types):
                     assert slice_i < my_shape[i]
                     begin[i] = slice_i
                     end[i] = slice_i + 1
+                    expand.append(i)
                 elif isinstance(slice_i, py_slice):
                     # only support continuous slicing
                     assert slice_i.step is None, \
@@ -397,16 +399,14 @@ def __setitem__(self, key, value):
 
             if isinstance(value, NDArray):
                 value = value.as_in_context(self.context)
-                _internal._crop_assign(self, value, out=self,
-                                       begin=begin, end=end)
+                self._slice_assign(value, begin, end, expand)
             elif isinstance(value, numeric_types):
                 _internal._crop_assign_scalar(self, out=self,
                                               begin=begin, end=end,
                                               scalar=value)
             elif isinstance(value, (np.ndarray, np.generic)):
-                value = array(value, ctx=self.context)
-                _internal._crop_assign(self, value, out=self,
-                                       begin=begin, end=end)
+                value = array(value, ctx=self.context, dtype=self.dtype)
+                self._slice_assign(value, begin, end, expand)
             else:
                 raise TypeError(
                     'NDArray does not support assignment with %s of type %s'%(
@@ -417,6 +417,22 @@ def __setitem__(self, key, value):
                     str(key), str(type(key))))
         # pylint: enable=too-many-branches
 
+    def _slice_assign(self, value, begin, end, expand):
+        vshape = list(value.shape)
+        if expand and len(vshape) != len(begin):
+            if len(expand) + len(vshape) != len(begin):
+                sshape = [e - b for e, b in zip(end, begin)]
+                for i in reversed(expand):
+                    sshape.pop(i)
+                raise ValueError(
+                    "Cannot assign NDArray with shape %s to NDArray slice with " \
+                    "shape %s"%(str(vshape), str(sshape)))
+            for i in expand:
+                vshape.insert(i, 1)
+            value = value.reshape(vshape)
+        _internal._crop_assign(self, value, out=self,
+                               begin=begin, end=end)
+
     def __getitem__(self, key):
         """x.__getitem__(i) <=> x[i]
 
@@ -757,7 +773,10 @@ def size(self):
         >>> np.prod(x.shape)
         30
         """
-        return np.prod(self.shape)
+        size = 1
+        for i in self.shape:
+            size *= i
+        return size
 
     @property
     def context(self):
diff --git a/tests/python/unittest/test_gluon_data.py b/tests/python/unittest/test_gluon_data.py
index 0d25cc497d32..2407f8e25991 100644
--- a/tests/python/unittest/test_gluon_data.py
+++ b/tests/python/unittest/test_gluon_data.py
@@ -15,6 +15,8 @@ def test_array_dataset():
 
 
 def prepare_record():
+    if not os.path.isdir("data"):
+        os.makedirs('data')
     if not os.path.isdir("data/test_images"):
         os.system("wget http://data.mxnet.io/data/test_images.tar.gz -O data/test_images.tar.gz")
         os.system("tar -xf data/test_images.tar.gz -C data")
diff --git a/tests/python/unittest/test_ndarray.py b/tests/python/unittest/test_ndarray.py
index 8b7f8d6d7bf3..f627ab837a3e 100644
--- a/tests/python/unittest/test_ndarray.py
+++ b/tests/python/unittest/test_ndarray.py
@@ -76,6 +76,14 @@ def test_ndarray_setitem():
     x_np[:, 1:3, 1:2] = val.asnumpy()
     assert same(x.asnumpy(), x_np)
 
+    # short all-dim indexing
+    x = mx.nd.zeros(shape)
+    val = mx.nd.ones((3, 2))
+    x[:, 1:3, 1] = val
+    x_np = np.zeros(shape, dtype=x.dtype)
+    x_np[:, 1:3, 1] = val.asnumpy()
+    assert same(x.asnumpy(), x_np)
+
     x = mx.nd.zeros(shape)
     x[:, 1:3, 1] = 1
     x_np = np.zeros(shape, dtype=x.dtype)
@@ -258,6 +266,7 @@ def test_ndarray_slice():
 
     assert A[1,2,3,4,5].asscalar() == A2[1,2,3,4,5]
 
+
 def test_ndarray_crop():
     # get crop
     x = mx.nd.ones((2, 3, 4))
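
With [PATCH 2/4], an NDArray assigned into a slice may omit the axes addressed by integer indices: __setitem__ records those axes in expand, and _slice_assign inserts size-1 axes at the recorded positions before handing the value to _internal._crop_assign. A small sketch of the behaviour the new "short all-dim indexing" test exercises (shapes here are chosen only for illustration):

import mxnet as mx
import numpy as np

x = mx.nd.zeros((2, 3, 4))
val = mx.nd.ones((2, 2))       # no axis for the integer index on dimension 2
x[:, 1:3, 1] = val             # expand == [2]; val is reshaped to (2, 2, 1) internally
assert np.array_equal(x.asnumpy()[:, 1:3, 1], val.asnumpy())
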
From c79ec5d76e11720f04d0a6919b540b08ce25b357 Mon Sep 17 00:00:00 2001
From: Kenji Doi
Date: Wed, 26 Jul 2017 17:27:51 +0900
Subject: [PATCH 3/4] Fix incorrect reference. (sym -> symbol) (#7199)

---
 example/image-classification/fine-tune.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/example/image-classification/fine-tune.py b/example/image-classification/fine-tune.py
index cfb43101542e..5a2a04d2c73b 100644
--- a/example/image-classification/fine-tune.py
+++ b/example/image-classification/fine-tune.py
@@ -13,7 +13,7 @@ def get_fine_tune_model(symbol, arg_params, num_classes, layer_name):
     num_classes: the number of classes for the fine-tune datasets
     layer_name: the layer name before the last fully-connected layer
     """
-    all_layers = sym.get_internals()
+    all_layers = symbol.get_internals()
     net = all_layers[layer_name+'_output']
     net = mx.symbol.FullyConnected(data=net, num_hidden=num_classes, name='fc')
     net = mx.symbol.SoftmaxOutput(data=net, name='softmax')
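
[PATCH 3/4] fixes a NameError in the fine-tuning example: the parameter is named symbol, but the body referred to an undefined sym. For context, the patched lines build a new classifier head on top of a pretrained symbol; a sketch with a hypothetical checkpoint prefix and layer name (mx.model.load_checkpoint returns the pretrained symbol and its parameters):

import mxnet as mx

sym, arg_params, aux_params = mx.model.load_checkpoint('resnet-50', 0)  # hypothetical prefix/epoch
all_layers = sym.get_internals()                   # the same call the patch repairs inside the function
net = all_layers['flatten0_output']                # layer_name + '_output'
net = mx.symbol.FullyConnected(data=net, num_hidden=10, name='fc')
net = mx.symbol.SoftmaxOutput(data=net, name='softmax')
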
From 500ea39b6ed7c1f2d1a02e41f26a7a6f2c33c16b Mon Sep 17 00:00:00 2001
From: Rahul
Date: Wed, 26 Jul 2017 01:28:23 -0700
Subject: [PATCH 4/4] Compilation warnings (#7198)

* add jenkins script to monitor compile warnings, and some more cast warnings fixed

* update jenkins script to time compilation install package time

* g++5 with timing

* redirect compile output to file

* only output real time

* print compile output to help debug

* fix typo
---
 src/operator/tensor/matrix_op-inl.h        |  4 +-
 .../compilation_warnings.sh                | 27 +++++++++++++
 .../compilation_warnings/process_output.py | 39 +++++++++++++++++++
 3 files changed, 68 insertions(+), 2 deletions(-)
 create mode 100644 tests/nightly/compilation_warnings/compilation_warnings.sh
 create mode 100644 tests/nightly/compilation_warnings/process_output.py

diff --git a/src/operator/tensor/matrix_op-inl.h b/src/operator/tensor/matrix_op-inl.h
index 75da055d0098..26f409a43525 100644
--- a/src/operator/tensor/matrix_op-inl.h
+++ b/src/operator/tensor/matrix_op-inl.h
@@ -1834,7 +1834,7 @@ void StackOpForward(const nnvm::NodeAttrs& attrs,
   for (int i = 0; i < axis; ++i) {
     leading *= outputs[0].shape_[i];
   }
-  for (index_t i = axis + 1; i < outputs[0].ndim(); ++i) {
+  for (int i = axis + 1; i < outputs[0].ndim(); ++i) {
     trailing *= outputs[0].shape_[i];
   }
   size_t mid = outputs[0].shape_[axis];
@@ -1868,7 +1868,7 @@ void StackOpBackward(const nnvm::NodeAttrs& attrs,
   for (int i = 0; i < axis; ++i) {
     leading *= inputs[0].shape_[i];
   }
-  for (index_t i = axis + 1; i < inputs[0].ndim(); ++i) {
+  for (int i = axis + 1; i < inputs[0].ndim(); ++i) {
     trailing *= inputs[0].shape_[i];
   }
   size_t mid = inputs[0].shape_[axis];
diff --git a/tests/nightly/compilation_warnings/compilation_warnings.sh b/tests/nightly/compilation_warnings/compilation_warnings.sh
new file mode 100644
index 000000000000..9c377e2dd1fb
--- /dev/null
+++ b/tests/nightly/compilation_warnings/compilation_warnings.sh
@@ -0,0 +1,27 @@
+
+runme() {
+    cmd=$*
+    echo "$cmd"
+    $cmd
+    ret=$?
+    if [[ ${ret} != 0 ]]; then
+        echo " "
+        echo "ERROR: Return value non-zero for: $cmd"
+        echo " "
+        exit 1
+    fi
+}
+
+sudo add-apt-repository ppa:ubuntu-toolchain-r/test
+sudo apt-get update
+sudo apt-get -y install time g++-5
+runme make clean >/dev/null
+runme mkdir build
+echo "Starting make"
+cp make/config.mk .
+sed -i -e 's/gcc/gcc-5/g' config.mk
+sed -i -e 's/g++/g++-5/g' config.mk
+runme /usr/bin/time -f "%e" make -j$(nproc) &> build/compile_output.txt
+cat build/compile_output.txt
+echo "Finished make. Now processing output"
+python tests/nightly/compilation_warnings/process_output.py build/compile_output.txt
diff --git a/tests/nightly/compilation_warnings/process_output.py b/tests/nightly/compilation_warnings/process_output.py
new file mode 100644
index 000000000000..1a57d81f8dad
--- /dev/null
+++ b/tests/nightly/compilation_warnings/process_output.py
@@ -0,0 +1,39 @@
+import re
+import sys
+import operator
+
+def process_output(command_output):
+    warnings = {}
+    regex = r"(.*):\swarning:\s(.*)"
+    lines = command_output.split("\n")
+    for line in lines[:-2]:
+        matches = re.finditer(regex, line)
+        for matchNum, match in enumerate(matches):
+            try:
+                warnings[match.group()] +=1
+            except KeyError:
+                warnings[match.group()] =1
+    time = lines[-2]
+    return time, warnings
+
+def generate_stats(warnings):
+    total_count = sum(warnings.values())
+    sorted_warnings = sorted(warnings.items(), key=operator.itemgetter(1), reverse=True)
+    return sorted_warnings, total_count
+
+def print_summary(time, warnings):
+    sorted_warnings, total_count = generate_stats(warnings)
+    print "START - Compilation warnings count"
+    print total_count
+    print "END - Compilation warnings count"
+    print 'START - Compilation warnings summary'
+    print 'Time taken to compile:', time, 's'
+    print 'Total number of warnings:', total_count, '\n'
+    print 'Below is the list of unique warnings and the number of occurrences of that warning'
+    for warning, count in sorted_warnings:
+        print count, ': ', warning
+    print 'END - Compilation warnings summary'
+
+c_output = open(sys.argv[1],'r')
+time, warnings = process_output(c_output.read())
+print_summary(time, warnings)
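
In [PATCH 4/4], compilation_warnings.sh builds with g++-5 under /usr/bin/time -f "%e", which writes only the elapsed seconds to stderr; with &> both streams go to compile_output.txt, so the time presumably ends up as the last line of the file, and process_output.py reads it from lines[-2] because the final element after split("\n") is the empty string left by the trailing newline. A self-contained sketch of the same counting idea, with a made-up warning line (single-argument print so it runs under Python 2 or 3):

import re

sample = ("src/foo.cc:12:3: warning: unused variable 'x' [-Wunused-variable]\n"
          "src/foo.cc:12:3: warning: unused variable 'x' [-Wunused-variable]\n"
          "98.76\n")                       # elapsed seconds appended by /usr/bin/time
lines = sample.split("\n")                 # last element is '' from the trailing newline
warnings = {}
for line in lines[:-2]:                    # skip the time entry and the trailing ''
    for match in re.finditer(r"(.*):\swarning:\s(.*)", line):
        warnings[match.group()] = warnings.get(match.group(), 0) + 1
print(lines[-2])                           # 98.76, the build time the script would report
print(warnings)                            # the duplicate warning line is counted twice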