fix: batch norm weights tensor to Dense
i8run committed Feb 9, 2018
1 parent 97a9b41 · commit 8349eb4
Showing 2 changed files with 3 additions and 3 deletions.
@@ -151,7 +151,7 @@ class Linear[T: ClassTag](
     primitive
   }

-  var _shouldConvert: Boolean = true
+  var _shouldConvert: Boolean = false
   def shouldConvert: Boolean = _shouldConvert
   def setShouldConvert(v: Boolean): this.type = {
     _shouldConvert = v
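The first hunk flips the default of Linear's _shouldConvert flag from true to false; judging by the name, the flag governs whether the layer converts its MKL-DNN-formatted tensors back to dense layout, so that conversion now happens only when a caller opts in through setShouldConvert. A minimal sketch of the flag-plus-fluent-setter pattern, with a hypothetical Layer class standing in for BigDL's Linear:

object FlagSketch extends App {
  // Hypothetical Layer standing in for BigDL's Linear: conversion is now
  // off by default, and the setter returns this.type so calls can chain.
  class Layer {
    private var _shouldConvert: Boolean = false
    def shouldConvert: Boolean = _shouldConvert
    def setShouldConvert(v: Boolean): this.type = {
      _shouldConvert = v
      this
    }
  }

  val layer = new Layer().setShouldConvert(true) // explicit opt-in
  assert(layer.shouldConvert)
}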
@@ -683,7 +683,7 @@ class Linear[T: ClassTag](

     if (gradOutputReorderPrim != 0) {
       if (gradOutput.getTensorType == DenseType && gradOutputPtr != 0) {
-        MklDnn.MemoryReleaseDataHandle(weight.storage().array().asInstanceOf[Array[Float]],
+        MklDnn.MemoryReleaseDataHandle(gradOutput.storage().array().asInstanceOf[Array[Float]],
           gradOutputPtr)
       }
     }
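The second hunk fixes what looks like a copy-paste bug in the backward pass: when tearing down the reorder primitive for a dense gradOutput, the code released the native data handle against weight's backing array instead of gradOutput's, so the handle registered for gradOutput was released against the wrong buffer. A sketch of the pairing invariant the fix restores, using a hypothetical MklDnnStub in place of BigDL's MklDnn JNI wrapper (memorySetDataHandle and memoryReleaseDataHandle are illustrative names, not the library's API):

import scala.collection.mutable

object MklDnnStub {
  // Hypothetical stand-in for the JNI wrapper: remembers which array each
  // handle was set on so a mismatched release can be detected.
  private val handles = mutable.Map.empty[Long, Array[Float]]
  private var next = 1L

  def memorySetDataHandle(data: Array[Float]): Long = {
    val ptr = next
    next += 1
    handles(ptr) = data
    ptr
  }

  def memoryReleaseDataHandle(data: Array[Float], ptr: Long): Unit = {
    // A handle must be released against the same array it was set on.
    require(handles.get(ptr).exists(_ eq data),
      s"handle $ptr was set on a different array")
    handles -= ptr
  }
}

object ReleaseDemo extends App {
  val weight = new Array[Float](16)
  val gradOutput = new Array[Float](16)
  val gradOutputPtr = MklDnnStub.memorySetDataHandle(gradOutput)
  // The buggy line passed weight here; the fix passes gradOutput:
  MklDnnStub.memoryReleaseDataHandle(gradOutput, gradOutputPtr)
}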
@@ -417,7 +417,7 @@ class SpatialBatchNormalization[T: ClassTag](
   // in mkl dnn, the weight and bias should be all in the same array
   private def createParams(initWeight: Tensor[T], initBias: Tensor[T]): Tensor[T] = {
     val weightAndBias: Tensor[T] = if (affine) {
-      MklDnnTensor[T](Array(2 * nOutput))
+      Tensor[T](Array(2 * nOutput))
     } else {
       null
     }
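The last hunk is the change the commit title names: SpatialBatchNormalization's packed parameter tensor is now allocated as a plain dense Tensor rather than an MklDnnTensor. As the comment in the hunk says, MKL-DNN wants weight (scale) and bias (shift) together in one array of length 2 * nOutput. A sketch of that packing, with plain float arrays standing in for BigDL tensors (the names mirror the hunk, but the code is illustrative, including the weight-first ordering):

object PackParamsSketch {
  // Concatenate weight and bias into one contiguous buffer: the first
  // nOutput entries hold the scale, the second nOutput entries the shift.
  def createParams(nOutput: Int,
                   initWeight: Array[Float],
                   initBias: Array[Float]): Array[Float] = {
    require(initWeight.length == nOutput && initBias.length == nOutput)
    val weightAndBias = new Array[Float](2 * nOutput) // plain dense allocation
    Array.copy(initWeight, 0, weightAndBias, 0, nOutput)
    Array.copy(initBias, 0, weightAndBias, nOutput, nOutput)
    weightAndBias
  }
}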
