diff --git a/example/image-classification/fine-tune.py b/example/image-classification/fine-tune.py
index cfb43101542e..5a2a04d2c73b 100644
--- a/example/image-classification/fine-tune.py
+++ b/example/image-classification/fine-tune.py
@@ -13,7 +13,7 @@ def get_fine_tune_model(symbol, arg_params, num_classes, layer_name):
     num_classes: the number of classes for the fine-tune datasets
     layer_name: the layer name before the last fully-connected layer
     """
-    all_layers = sym.get_internals()
+    all_layers = symbol.get_internals()
     net = all_layers[layer_name+'_output']
     net = mx.symbol.FullyConnected(data=net, num_hidden=num_classes, name='fc')
     net = mx.symbol.SoftmaxOutput(data=net, name='softmax')
diff --git a/example/image-classification/symbols/alexnet.py b/example/image-classification/symbols/alexnet.py
index 2534797a9eba..e2b512b195c8 100755
--- a/example/image-classification/symbols/alexnet.py
+++ b/example/image-classification/symbols/alexnet.py
@@ -6,7 +6,7 @@
 import mxnet as mx
 import numpy as np

-def get_symbol(num_classes, dtype, **kwargs):
+def get_symbol(num_classes, dtype='float32', **kwargs):
     input_data = mx.sym.Variable(name="data")
     if dtype == 'float16':
         input_data = mx.sym.Cast(data=input_data, dtype=np.float16)
diff --git a/python/mxnet/gluon/data/sampler.py b/python/mxnet/gluon/data/sampler.py
index 7bfc418399f5..f6cedf051727 100644
--- a/python/mxnet/gluon/data/sampler.py
+++ b/python/mxnet/gluon/data/sampler.py
@@ -47,7 +47,7 @@ def __init__(self, length):
         self._length = length

     def __iter__(self):
-        indices = range(self._length)
+        indices = list(range(self._length))
         random.shuffle(indices)
         return iter(indices)

diff --git a/python/mxnet/ndarray/ndarray.py b/python/mxnet/ndarray/ndarray.py
index 1cd61d3545ba..c45cefaedbc0 100644
--- a/python/mxnet/ndarray/ndarray.py
+++ b/python/mxnet/ndarray/ndarray.py
@@ -370,11 +370,13 @@ def __setitem__(self, key, value):
                     len(key), len(my_shape))
             begin = [0 for _ in my_shape]
             end = [x for x in my_shape]
+            expand = []
             for i, slice_i in enumerate(key):
                 if isinstance(slice_i, integer_types):
                     assert slice_i < my_shape[i]
                     begin[i] = slice_i
                     end[i] = slice_i + 1
+                    expand.append(i)
                 elif isinstance(slice_i, py_slice):
                     # only support continuous slicing
                     assert slice_i.step is None, \
@@ -390,16 +392,14 @@
             if isinstance(value, NDArray):
                 value = value.as_in_context(self.context)
-                _internal._crop_assign(self, value, out=self,
-                                       begin=begin, end=end)
+                self._slice_assign(value, begin, end, expand)
             elif isinstance(value, numeric_types):
                 _internal._crop_assign_scalar(self, out=self,
                                               begin=begin, end=end,
                                               scalar=value)
             elif isinstance(value, (np.ndarray, np.generic)):
-                value = array(value, ctx=self.context)
-                _internal._crop_assign(self, value, out=self,
-                                       begin=begin, end=end)
+                value = array(value, ctx=self.context, dtype=self.dtype)
+                self._slice_assign(value, begin, end, expand)
             else:
                 raise TypeError(
                     'NDArray does not support assignment with %s of type %s'%(
@@ -410,6 +410,22 @@
                     str(key), str(type(key))))
         # pylint: enable=too-many-branches

+    def _slice_assign(self, value, begin, end, expand):
+        vshape = list(value.shape)
+        if expand and len(vshape) != len(begin):
+            if len(expand) + len(vshape) != len(begin):
+                sshape = [e - b for e, b in zip(end, begin)]
+                for i in reversed(expand):
+                    sshape.pop(i)
+                raise ValueError(
+                    "Cannot assign NDArray with shape %s to NDArray slice with " \
+                    "shape %s"%(str(vshape), str(sshape)))
+            for i in expand:
+                vshape.insert(i, 1)
+            value = value.reshape(vshape)
+        _internal._crop_assign(self, value, out=self,
+                               begin=begin, end=end)
+
     def __getitem__(self, key):
         """x.__getitem__(i) <=> x[i]

@@ -750,7 +766,10 @@ def size(self):
         >>> np.prod(x.shape)
         30
         """
-        return np.prod(self.shape)
+        size = 1
+        for i in self.shape:
+            size *= i
+        return size

     @property
     def context(self):
diff --git a/src/operator/tensor/matrix_op-inl.h b/src/operator/tensor/matrix_op-inl.h
index 57e201a38896..aaab7ed26320 100644
--- a/src/operator/tensor/matrix_op-inl.h
+++ b/src/operator/tensor/matrix_op-inl.h
@@ -1567,7 +1567,7 @@ void StackOpForward(const nnvm::NodeAttrs& attrs,
   for (int i = 0; i < axis; ++i) {
     leading *= outputs[0].shape_[i];
   }
-  for (index_t i = axis + 1; i < outputs[0].ndim(); ++i) {
+  for (int i = axis + 1; i < outputs[0].ndim(); ++i) {
     trailing *= outputs[0].shape_[i];
   }
   size_t mid = outputs[0].shape_[axis];
@@ -1601,7 +1601,7 @@ void StackOpBackward(const nnvm::NodeAttrs& attrs,
   for (int i = 0; i < axis; ++i) {
     leading *= inputs[0].shape_[i];
   }
-  for (index_t i = axis + 1; i < inputs[0].ndim(); ++i) {
+  for (int i = axis + 1; i < inputs[0].ndim(); ++i) {
     trailing *= inputs[0].shape_[i];
   }
   size_t mid = inputs[0].shape_[axis];
diff --git a/tests/nightly/compilation_warnings/compilation_warnings.sh b/tests/nightly/compilation_warnings/compilation_warnings.sh
new file mode 100644
index 000000000000..9c377e2dd1fb
--- /dev/null
+++ b/tests/nightly/compilation_warnings/compilation_warnings.sh
@@ -0,0 +1,27 @@
+
+runme() {
+    cmd=$*
+    echo "$cmd"
+    $cmd
+    ret=$?
+    if [[ ${ret} != 0 ]]; then
+        echo " "
+        echo "ERROR: Return value non-zero for: $cmd"
+        echo " "
+        exit 1
+    fi
+}
+
+sudo add-apt-repository ppa:ubuntu-toolchain-r/test
+sudo apt-get update
+sudo apt-get -y install time g++-5
+runme make clean >/dev/null
+runme mkdir build
+echo "Starting make"
+cp make/config.mk .
+sed -i -e 's/gcc/gcc-5/g' config.mk
+sed -i -e 's/g++/g++-5/g' config.mk
+runme /usr/bin/time -f "%e" make -j$(nproc) &> build/compile_output.txt
+cat build/compile_output.txt
+echo "Finished make. Now processing output"
+python tests/nightly/compilation_warnings/process_output.py build/compile_output.txt
diff --git a/tests/nightly/compilation_warnings/process_output.py b/tests/nightly/compilation_warnings/process_output.py
new file mode 100644
index 000000000000..1a57d81f8dad
--- /dev/null
+++ b/tests/nightly/compilation_warnings/process_output.py
@@ -0,0 +1,39 @@
+import re
+import sys
+import operator
+
+def process_output(command_output):
+    warnings = {}
+    regex = r"(.*):\swarning:\s(.*)"
+    lines = command_output.split("\n")
+    for line in lines[:-2]:
+        matches = re.finditer(regex, line)
+        for matchNum, match in enumerate(matches):
+            try:
+                warnings[match.group()] +=1
+            except KeyError:
+                warnings[match.group()] =1
+    time = lines[-2]
+    return time, warnings
+
+def generate_stats(warnings):
+    total_count = sum(warnings.values())
+    sorted_warnings = sorted(warnings.items(), key=operator.itemgetter(1), reverse=True)
+    return sorted_warnings, total_count
+
+def print_summary(time, warnings):
+    sorted_warnings, total_count = generate_stats(warnings)
+    print "START - Compilation warnings count"
+    print total_count
+    print "END - Compilation warnings count"
+    print 'START - Compilation warnings summary'
+    print 'Time taken to compile:', time, 's'
+    print 'Total number of warnings:', total_count, '\n'
+    print 'Below is the list of unique warnings and the number of occurrences of that warning'
+    for warning, count in sorted_warnings:
+        print count, ': ', warning
+    print 'END - Compilation warnings summary'
+
+c_output = open(sys.argv[1],'r')
+time, warnings = process_output(c_output.read())
+print_summary(time, warnings)
diff --git a/tests/python/unittest/test_gluon_data.py b/tests/python/unittest/test_gluon_data.py
index 0d25cc497d32..2407f8e25991 100644
--- a/tests/python/unittest/test_gluon_data.py
+++ b/tests/python/unittest/test_gluon_data.py
@@ -15,6 +15,8 @@ def test_array_dataset():


 def prepare_record():
+    if not os.path.isdir("data"):
+        os.makedirs('data')
     if not os.path.isdir("data/test_images"):
         os.system("wget http://data.mxnet.io/data/test_images.tar.gz -O data/test_images.tar.gz")
         os.system("tar -xf data/test_images.tar.gz -C data")
diff --git a/tests/python/unittest/test_ndarray.py b/tests/python/unittest/test_ndarray.py
index c0ed1aef2ba3..6d036f33c0f9 100644
--- a/tests/python/unittest/test_ndarray.py
+++ b/tests/python/unittest/test_ndarray.py
@@ -76,6 +76,14 @@ def test_ndarray_setitem():
         x_np[:, 1:3, 1:2] = val.asnumpy()
     assert same(x.asnumpy(), x_np)

+    # short all-dim indexing
+    x = mx.nd.zeros(shape)
+    val = mx.nd.ones((3, 2))
+    x[:, 1:3, 1] = val
+    x_np = np.zeros(shape, dtype=x.dtype)
+    x_np[:, 1:3, 1] = val.asnumpy()
+    assert same(x.asnumpy(), x_np)
+
     x = mx.nd.zeros(shape)
     x[:, 1:3, 1] = 1
     x_np = np.zeros(shape, dtype=x.dtype)
@@ -258,6 +266,7 @@

     assert A[1,2,3,4,5].asscalar() == A2[1,2,3,4,5]

+
 def test_ndarray_crop():
     # get crop
     x = mx.nd.ones((2, 3, 4))
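
Usage sketch (not part of the patch; assumes an MXNet build that includes the _slice_assign change to python/mxnet/ndarray/ndarray.py above): with this change, NDArray.__setitem__ accepts a value whose shape omits the axes addressed by integer indices, mirroring the "short all-dim indexing" case added to tests/python/unittest/test_ndarray.py.

    import mxnet as mx

    x = mx.nd.zeros((3, 4, 2))
    val = mx.nd.ones((3, 2))    # shape omits the integer-indexed last axis
    x[:, 1:3, 1] = val          # _slice_assign reshapes val to (3, 2, 1) before _crop_assign
    print(x.asnumpy())          # ones at [:, 1:3, 1], zeros elsewhere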