Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/master' into dmlc-sparse-squash
Browse files Browse the repository at this point in the history
  • Loading branch information
eric-haibin-lin committed Jul 26, 2017
2 parents 05ddf38 + 500ea39 commit f57fc3c
Show file tree
Hide file tree
Showing 9 changed files with 107 additions and 11 deletions.
2 changes: 1 addition & 1 deletion example/image-classification/fine-tune.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ def get_fine_tune_model(symbol, arg_params, num_classes, layer_name):
num_classes: the number of classes for the fine-tune datasets
layer_name: the layer name before the last fully-connected layer
"""
all_layers = sym.get_internals()
all_layers = symbol.get_internals()
net = all_layers[layer_name+'_output']
net = mx.symbol.FullyConnected(data=net, num_hidden=num_classes, name='fc')
net = mx.symbol.SoftmaxOutput(data=net, name='softmax')
Expand Down
2 changes: 1 addition & 1 deletion example/image-classification/symbols/alexnet.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import mxnet as mx
import numpy as np

def get_symbol(num_classes, dtype, **kwargs):
def get_symbol(num_classes, dtype='float32', **kwargs):
input_data = mx.sym.Variable(name="data")
if dtype == 'float16':
input_data = mx.sym.Cast(data=input_data, dtype=np.float16)
Expand Down
2 changes: 1 addition & 1 deletion python/mxnet/gluon/data/sampler.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ def __init__(self, length):
self._length = length

def __iter__(self):
indices = range(self._length)
indices = list(range(self._length))
random.shuffle(indices)
return iter(indices)

Expand Down
31 changes: 25 additions & 6 deletions python/mxnet/ndarray/ndarray.py
Original file line number Diff line number Diff line change
Expand Up @@ -370,11 +370,13 @@ def __setitem__(self, key, value):
len(key), len(my_shape))
begin = [0 for _ in my_shape]
end = [x for x in my_shape]
expand = []
for i, slice_i in enumerate(key):
if isinstance(slice_i, integer_types):
assert slice_i < my_shape[i]
begin[i] = slice_i
end[i] = slice_i + 1
expand.append(i)
elif isinstance(slice_i, py_slice):
# only support continuous slicing
assert slice_i.step is None, \
Expand All @@ -390,16 +392,14 @@ def __setitem__(self, key, value):

if isinstance(value, NDArray):
value = value.as_in_context(self.context)
_internal._crop_assign(self, value, out=self,
begin=begin, end=end)
self._slice_assign(value, begin, end, expand)
elif isinstance(value, numeric_types):
_internal._crop_assign_scalar(self, out=self,
begin=begin, end=end,
scalar=value)
elif isinstance(value, (np.ndarray, np.generic)):
value = array(value, ctx=self.context)
_internal._crop_assign(self, value, out=self,
begin=begin, end=end)
value = array(value, ctx=self.context, dtype=self.dtype)
self._slice_assign(value, begin, end, expand)
else:
raise TypeError(
'NDArray does not support assignment with %s of type %s'%(
Expand All @@ -410,6 +410,22 @@ def __setitem__(self, key, value):
str(key), str(type(key))))
# pylint: enable=too-many-branches

def _slice_assign(self, value, begin, end, expand):
    """Assign NDArray ``value`` into the slice of ``self`` bounded by
    ``begin``/``end``, re-inserting the integer-indexed axes listed in
    ``expand`` so the shapes line up.

    Parameters
    ----------
    value : NDArray
        Source array. Its shape must equal the slice shape after the
        ``expand`` axes are removed (or already match the full slice).
    begin : list of int
        Per-axis start index (inclusive) of the target slice.
    end : list of int
        Per-axis stop index (exclusive) of the target slice.
    expand : list of int
        Axes that were indexed with a single integer in ``__setitem__``
        and therefore have no corresponding dimension in ``value``.

    Raises
    ------
    ValueError
        If ``value``'s shape cannot be matched to the slice shape.
    """
    vshape = list(value.shape)
    # Only reshape when `value` is missing the integer-indexed axes;
    # if its rank already matches the slice, use it as-is.
    if expand and len(vshape) != len(begin):
        if len(expand) + len(vshape) != len(begin):
            # Reconstruct the user-visible slice shape (with the
            # integer-indexed axes dropped) for the error message.
            sshape = [e - b for e, b in zip(end, begin)]
            for i in reversed(expand):
                sshape.pop(i)
            raise ValueError(
                "Cannot assign NDArray with shape %s to NDArray slice with " \
                "shape %s"%(str(vshape), str(sshape)))
        # Re-insert size-1 dimensions at the integer-indexed positions so
        # `value` broadcasts onto the full slice.
        for i in expand:
            vshape.insert(i, 1)
        value = value.reshape(vshape)
    _internal._crop_assign(self, value, out=self,
                           begin=begin, end=end)

def __getitem__(self, key):
"""x.__getitem__(i) <=> x[i]
Expand Down Expand Up @@ -750,7 +766,10 @@ def size(self):
>>> np.prod(x.shape)
30
"""
return np.prod(self.shape)
size = 1
for i in self.shape:
size *= i
return size

@property
def context(self):
Expand Down
4 changes: 2 additions & 2 deletions src/operator/tensor/matrix_op-inl.h
Original file line number Diff line number Diff line change
Expand Up @@ -1567,7 +1567,7 @@ void StackOpForward(const nnvm::NodeAttrs& attrs,
for (int i = 0; i < axis; ++i) {
leading *= outputs[0].shape_[i];
}
for (index_t i = axis + 1; i < outputs[0].ndim(); ++i) {
for (int i = axis + 1; i < outputs[0].ndim(); ++i) {
trailing *= outputs[0].shape_[i];
}
size_t mid = outputs[0].shape_[axis];
Expand Down Expand Up @@ -1601,7 +1601,7 @@ void StackOpBackward(const nnvm::NodeAttrs& attrs,
for (int i = 0; i < axis; ++i) {
leading *= inputs[0].shape_[i];
}
for (index_t i = axis + 1; i < inputs[0].ndim(); ++i) {
for (int i = axis + 1; i < inputs[0].ndim(); ++i) {
trailing *= inputs[0].shape_[i];
}
size_t mid = inputs[0].shape_[axis];
Expand Down
27 changes: 27 additions & 0 deletions tests/nightly/compilation_warnings/compilation_warnings.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@

runme() {
    # Echo and execute the given command; abort the whole script with
    # status 1 if the command exits non-zero.
    # NOTE(review): `cmd=$*` flattens all arguments into one string and
    # `$cmd` re-splits it on whitespace — adequate for the simple
    # commands used in this script, but arguments containing spaces
    # would be broken apart.
    cmd=$*
    echo "$cmd"
    $cmd
    ret=$?
    if [[ ${ret} != 0 ]]; then
        echo " "
        echo "ERROR: Return value non-zero for: $cmd"
        echo " "
        exit 1
    fi
}

# Install a pinned toolchain (gcc/g++ 5 from the toolchain PPA) plus GNU
# time, so warning counts are comparable across nightly runs.
sudo add-apt-repository ppa:ubuntu-toolchain-r/test
sudo apt-get update
sudo apt-get -y install time g++-5
runme make clean >/dev/null
runme mkdir build
echo "Starting make"
# Point the build configuration at the freshly installed compilers.
cp make/config.mk .
sed -i -e 's/gcc/gcc-5/g' config.mk
sed -i -e 's/g++/g++-5/g' config.mk
# Build with all cores. `/usr/bin/time -f "%e"` appends the elapsed
# seconds as the final line of the captured log, which process_output.py
# relies on below.
runme /usr/bin/time -f "%e" make -j$(nproc) &> build/compile_output.txt
cat build/compile_output.txt
echo "Finished make. Now processing output"
# Summarize compiler warnings from the captured build log.
python tests/nightly/compilation_warnings/process_output.py build/compile_output.txt
39 changes: 39 additions & 0 deletions tests/nightly/compilation_warnings/process_output.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
from __future__ import print_function

import operator
import re
import sys

def process_output(command_output):
    """Parse a captured compiler log into an elapsed time and warning counts.

    Parameters
    ----------
    command_output : str
        Full text of the build log. The second-to-last line is the elapsed
        time emitted by ``/usr/bin/time -f "%e"`` (the last line is empty
        due to the trailing newline).

    Returns
    -------
    (time, warnings) : (str, dict)
        The elapsed-time line, and a mapping from each unique
        ``<location>: warning: <message>`` string to its occurrence count.
    """
    warnings = {}
    # Hoist and compile the pattern once instead of re-matching per line.
    regex = re.compile(r"(.*):\swarning:\s(.*)")
    lines = command_output.split("\n")
    # Skip the trailing empty line and the time line when scanning.
    for line in lines[:-2]:
        for match in regex.finditer(line):
            key = match.group()
            warnings[key] = warnings.get(key, 0) + 1
    time = lines[-2]
    return time, warnings

def generate_stats(warnings):
    """Rank the warnings by frequency.

    Returns a pair ``(ranked, total)`` where ``ranked`` is the list of
    ``(warning, count)`` items sorted by descending count and ``total``
    is the sum of all counts.
    """
    total = sum(warnings.values())
    ranked = sorted(warnings.items(), key=lambda item: item[1], reverse=True)
    return ranked, total

def print_summary(time, warnings):
    """Print machine-readable count markers followed by a human-readable
    summary of unique warnings sorted by frequency.

    Fixed: the original used Python-2-only ``print`` statements, which are
    a SyntaxError under Python 3. Converted to ``print()`` calls (output
    is identical; the ``__future__`` import at the top of the file keeps
    multi-argument prints working on Python 2 as well).

    Parameters
    ----------
    time : str
        Elapsed compile time in seconds, as reported by /usr/bin/time.
    warnings : dict
        Mapping of unique warning text to occurrence count.
    """
    sorted_warnings, total_count = generate_stats(warnings)
    # START/END markers are parsed by the nightly CI job — keep them exact.
    print("START - Compilation warnings count")
    print(total_count)
    print("END - Compilation warnings count")
    print('START - Compilation warnings summary')
    print('Time taken to compile:', time, 's')
    print('Total number of warnings:', total_count, '\n')
    print('Below is the list of unique warnings and the number of occurrences of that warning')
    for warning, count in sorted_warnings:
        print(count, ': ', warning)
    print('END - Compilation warnings summary')

if __name__ == '__main__':
    # Usage: python process_output.py <build_log>
    # `with` guarantees the log file is closed (the original leaked the
    # handle), and the guard prevents the script from running on import.
    with open(sys.argv[1], 'r') as c_output:
        time, warnings = process_output(c_output.read())
    print_summary(time, warnings)
2 changes: 2 additions & 0 deletions tests/python/unittest/test_gluon_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@ def test_array_dataset():


def prepare_record():
if not os.path.isdir("data"):
os.makedirs('data')
if not os.path.isdir("data/test_images"):
os.system("wget http://data.mxnet.io/data/test_images.tar.gz -O data/test_images.tar.gz")
os.system("tar -xf data/test_images.tar.gz -C data")
Expand Down
9 changes: 9 additions & 0 deletions tests/python/unittest/test_ndarray.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,14 @@ def test_ndarray_setitem():
x_np[:, 1:3, 1:2] = val.asnumpy()
assert same(x.asnumpy(), x_np)

# short all-dim indexing
x = mx.nd.zeros(shape)
val = mx.nd.ones((3, 2))
x[:, 1:3, 1] = val
x_np = np.zeros(shape, dtype=x.dtype)
x_np[:, 1:3, 1] = val.asnumpy()
assert same(x.asnumpy(), x_np)

x = mx.nd.zeros(shape)
x[:, 1:3, 1] = 1
x_np = np.zeros(shape, dtype=x.dtype)
Expand Down Expand Up @@ -258,6 +266,7 @@ def test_ndarray_slice():
assert A[1,2,3,4,5].asscalar() == A2[1,2,3,4,5]



def test_ndarray_crop():
# get crop
x = mx.nd.ones((2, 3, 4))
Expand Down

0 comments on commit f57fc3c

Please sign in to comment.