Wrap some useful torch layers (intel#3)
* more torch layers

* update

* python wrapper

* update alias

* style
hkvision authored Apr 13, 2018
1 parent 6d882c8 commit 003b822
Showing 11 changed files with 534 additions and 8 deletions.
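The specs in this diff exercise the newly wrapped layers: extra Activation aliases (relu6, tanh_shrink, softmin, log_sigmoid, log_softmax) plus AddConstant, MulConstant, LRN2D and Narrow under com.intel.analytics.zoo.pipeline.api.keras.layers.extra. A minimal usage sketch assembled only from the constructors shown in these tests follows; the object wrapper, the omission of inputShape on non-first layers, and the direct forward call are assumptions rather than code from this commit.

import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.Shape
import com.intel.analytics.zoo.pipeline.api.keras.layers.{Activation, InputLayer, Sequential}
import com.intel.analytics.zoo.pipeline.api.keras.layers.extra.{AddConstant, MulConstant, Narrow}

object WrappedLayersSketch {
  def main(args: Array[String]): Unit = {
    val model = Sequential[Float]()
    model.add(InputLayer[Float](inputShape = Shape(4, 5)))
    // Torch-style activation exposed through the Keras-style Activation wrapper.
    model.add(Activation[Float]("relu6"))
    // Element-wise constant add and multiply, wrapped from BigDL AddConstant/MulConstant.
    model.add(AddConstant[Float](1.0))
    model.add(MulConstant[Float](0.5))
    // Keep 2 entries of the last dimension, starting at 0-based offset 1.
    model.add(Narrow[Float](2, 1, 2))

    val input = Tensor[Float](Array(3, 4, 5)).rand()
    val output = model.forward(input).toTensor[Float]
    println(output.size().mkString("x"))  // should print 3x4x2
  }
}
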
@@ -26,7 +26,6 @@ import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}

import scala.collection.mutable.ArrayBuffer


abstract class ZooSpecHelper extends FlatSpec with Matchers with BeforeAndAfter {
protected val logger = Logger.getLogger(getClass)

@@ -21,6 +21,7 @@ import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.Shape

class ActivationSpec extends KerasBaseSpec {

"tanh" should "be the same as Keras" in {
val kerasCode =
"""
@@ -140,4 +141,20 @@ class ActivationSpec extends KerasBaseSpec {
checkOutputAndGrad(seq.asInstanceOf[AbstractModule[Tensor[Float], Tensor[Float], Float]],
kerasCode)
}

"linear" should "be the same as Keras" in {
val kerasCode =
"""
|input_tensor = Input(shape=[4, 5])
|input = np.random.random([2, 4, 5])
|output_tensor = Activation('linear')(input_tensor)
|model = Model(input=input_tensor, output=output_tensor)
""".stripMargin
val seq = Sequential[Float]()
val layer = Activation[Float]("linear", inputShape = Shape(4, 5))
seq.add(layer)
checkOutputAndGrad(seq.asInstanceOf[AbstractModule[Tensor[Float], Tensor[Float], Float]],
kerasCode)
}

}
@@ -17,7 +17,6 @@
package com.intel.analytics.zoo.pipeline.api.keras.layers

import com.intel.analytics.bigdl.nn.abstractnn.AbstractModule
-import com.intel.analytics.zoo.pipeline.api.keras.layers.{Input => ZInput, InputLayer => ZInputLayer, Dense => ZDense, Sequential => ZSequential, Model => ZModel}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.Shape

@@ -33,10 +32,10 @@ class DenseSpec extends KerasBaseSpec {
|output_tensor = Dense(2, activation="relu")(input_tensor)
|model = Model(input=input_tensor, output=output_tensor)
""".stripMargin
-val seq = ZSequential[Float]()
-val input = ZInputLayer[Float](inputShape = Shape(3), name = "input1")
val seq = Sequential[Float]()
val input = InputLayer[Float](inputShape = Shape(3), name = "input1")
seq.add(input)
-val dense = ZDense[Float](2, activation = "relu")
val dense = Dense[Float](2, activation = "relu")
seq.add(dense)
seq.getOutputShape().toSingle().toArray should be (Array(-1, 2))
checkOutputAndGrad(seq.asInstanceOf[AbstractModule[Tensor[Float], Tensor[Float], Float]],
@@ -52,9 +51,9 @@
|Dense(2, init='one', input_shape=(10, 5, 7))(input_tensor)
|model = Model(input=input_tensor, output=output_tensor)
""".stripMargin
-val input = ZInput[Float](inputShape = Shape(10, 5, 7))
-val dense = ZDense[Float](2, init = "one").inputs(input)
-val model = ZModel(input, dense)
val input = Input[Float](inputShape = Shape(10, 5, 7))
val dense = Dense[Float](2, init = "one").inputs(input)
val model = Model(input, dense)
model.getOutputShape().toSingle().toArray should be (Array(-1, 10, 5, 2))
checkOutputAndGrad(model.asInstanceOf[AbstractModule[Tensor[Float], Tensor[Float], Float]],
kerasCode, weightConverter, precision = 1e-4)
@@ -0,0 +1,72 @@
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.intel.analytics.zoo.pipeline.api.keras.layers.extra

import com.intel.analytics.bigdl.nn._
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.Shape
import com.intel.analytics.zoo.pipeline.api.keras.ZooSpecHelper
import com.intel.analytics.zoo.pipeline.api.keras.layers.Activation

class ActivationSpec extends ZooSpecHelper {

"ReLU6 Zoo" should "be the same as BigDL" in {
val blayer = ReLU6[Float]()
val zlayer = Activation[Float]("relu6", inputShape = Shape(4, 5))
zlayer.build(Shape(-1, 4, 5))
zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 4, 5))
val input = Tensor[Float](Array(2, 4, 5)).rand()
compareOutputAndGradInput(blayer, zlayer, input)
}

"TanhShrink Zoo" should "be the same as BigDL" in {
val blayer = TanhShrink[Float]()
val zlayer = Activation[Float]("tanh_shrink", inputShape = Shape(4, 5))
zlayer.build(Shape(-1, 4, 5))
zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 4, 5))
val input = Tensor[Float](Array(2, 4, 5)).rand()
compareOutputAndGradInput(blayer, zlayer, input)
}

"SoftMin Zoo" should "be the same as BigDL" in {
val blayer = SoftMin[Float]()
val zlayer = Activation[Float]("softmin", inputShape = Shape(4, 5))
zlayer.build(Shape(-1, 4, 5))
zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 4, 5))
val input = Tensor[Float](Array(2, 4, 5)).rand()
compareOutputAndGradInput(blayer, zlayer, input)
}

"LogSigmoid Zoo" should "be the same as BigDL" in {
val blayer = LogSigmoid[Float]()
val zlayer = Activation[Float]("log_sigmoid", inputShape = Shape(4, 5))
zlayer.build(Shape(-1, 4, 5))
zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 4, 5))
val input = Tensor[Float](Array(2, 4, 5)).rand()
compareOutputAndGradInput(blayer, zlayer, input)
}

"LogSoftMax Zoo" should "be the same as BigDL" in {
val blayer = LogSoftMax[Float]()
val zlayer = Activation[Float]("log_softmax", inputShape = Shape(10))
zlayer.build(Shape(-1, 10))
zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 10))
val input = Tensor[Float](Array(2, 10)).rand()
compareOutputAndGradInput(blayer, zlayer, input)
}

}
@@ -0,0 +1,45 @@
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.intel.analytics.zoo.pipeline.api.keras.layers.extra

import com.intel.analytics.bigdl.nn.{AddConstant => BAddConstant}
import com.intel.analytics.zoo.pipeline.api.keras.layers.extra.{AddConstant => ZAddConstant}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.Shape
import com.intel.analytics.zoo.pipeline.api.keras.ZooSpecHelper

class AddConstantSpec extends ZooSpecHelper {

"AddConstant 1 Zoo" should "be the same as BigDL" in {
val blayer = BAddConstant[Float](1)
val zlayer = ZAddConstant[Float](1, inputShape = Shape(4, 5))
zlayer.build(Shape(-1, 4, 5))
zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 4, 5))
val input = Tensor[Float](Array(3, 4, 5)).rand()
compareOutputAndGradInput(blayer, zlayer, input)
}

"AddConstant -0.4 Zoo" should "be the same as BigDL" in {
val blayer = BAddConstant[Float](-0.4)
val zlayer = ZAddConstant[Float](-0.4, inputShape = Shape(4, 8, 8))
zlayer.build(Shape(-1, 4, 8, 8))
zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 4, 8, 8))
val input = Tensor[Float](Array(3, 4, 8, 8)).rand()
compareOutputAndGradInput(blayer, zlayer, input)
}

}
@@ -0,0 +1,44 @@
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.intel.analytics.zoo.pipeline.api.keras.layers.extra

import com.intel.analytics.bigdl.nn.SpatialCrossMapLRN
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.Shape
import com.intel.analytics.zoo.pipeline.api.keras.ZooSpecHelper

class LRN2DSpec extends ZooSpecHelper {

"LRN2D Zoo th" should "be the same as BigDL" in {
val blayer = SpatialCrossMapLRN[Float](5, 0.0001)
val zlayer = LRN2D[Float](inputShape = Shape(3, 32, 32))
zlayer.build(Shape(-1, 3, 32, 32))
zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 3, 32, 32))
val input = Tensor[Float](Array(10, 3, 32, 32)).rand()
compareOutputAndGradInput(blayer, zlayer, input)
}

"LRN2D Zoo tf" should "be the same as BigDL" in {
val blayer = SpatialCrossMapLRN[Float](5, 0.001, 0.75, 2.0)
val zlayer = LRN2D[Float](0.001, 2.0, 0.75, 5, inputShape = Shape(12, 12, 2))
zlayer.build(Shape(-1, 12, 12, 2))
zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 12, 12, 2))
val input = Tensor[Float](Array(10, 12, 12, 2)).rand()
compareOutputAndGradInput(blayer, zlayer, input)
}

}
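
Matching the tf-format case above against BigDL's SpatialCrossMapLRN(size, alpha, beta, k) suggests that the wrapper's positional order is (alpha, k, beta, n). A hedged sketch of that reading; the argument roles are inferred from this spec, not stated anywhere in the diff.

import com.intel.analytics.bigdl.utils.Shape
import com.intel.analytics.zoo.pipeline.api.keras.layers.extra.LRN2D

object LRN2DMappingSketch {
  def main(args: Array[String]): Unit = {
    // Same arguments as the tf-format test above, read as (alpha = 0.001, k = 2.0, beta = 0.75, n = 5),
    // mirroring BigDL's SpatialCrossMapLRN(size = 5, alpha = 0.001, beta = 0.75, k = 2.0).
    val lrn = LRN2D[Float](0.001, 2.0, 0.75, 5, inputShape = Shape(12, 12, 2))
    lrn.build(Shape(-1, 12, 12, 2))
    println(lrn.getOutputShape().toSingle().mkString(", "))  // LRN preserves the input shape: -1, 12, 12, 2
  }
}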
@@ -0,0 +1,45 @@
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.intel.analytics.zoo.pipeline.api.keras.layers.extra

import com.intel.analytics.bigdl.nn.{MulConstant => BMulConstant}
import com.intel.analytics.zoo.pipeline.api.keras.layers.extra.{MulConstant => ZMulConstant}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.Shape
import com.intel.analytics.zoo.pipeline.api.keras.ZooSpecHelper

class MulConstantSpec extends ZooSpecHelper {

"MulConstant 0 Zoo" should "be the same as BigDL" in {
val blayer = BMulConstant[Float](0f)
val zlayer = ZMulConstant[Float](0f, inputShape = Shape(4, 5))
zlayer.build(Shape(-1, 4, 5))
zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 4, 5))
val input = Tensor[Float](Array(3, 4, 5)).rand()
compareOutputAndGradInput(blayer, zlayer, input)
}

"MulConstant -1 Zoo" should "be the same as BigDL" in {
val blayer = BMulConstant[Float](-1)
val zlayer = ZMulConstant[Float](-1, inputShape = Shape(4, 8, 8))
zlayer.build(Shape(-1, 4, 8, 8))
zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 4, 8, 8))
val input = Tensor[Float](Array(3, 4, 8, 8)).rand()
compareOutputAndGradInput(blayer, zlayer, input)
}

}
@@ -0,0 +1,93 @@
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.intel.analytics.zoo.pipeline.api.keras.layers.extra

import com.intel.analytics.bigdl.nn.{Narrow => BNarrow}
import com.intel.analytics.zoo.pipeline.api.keras.layers.extra.{Narrow => ZNarrow}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.Shape
import com.intel.analytics.zoo.pipeline.api.keras.ZooSpecHelper

class NarrowSpec extends ZooSpecHelper {

"Narrow Zoo 2D" should "be the same as BigDL" in {
val blayer = BNarrow[Float](2, 3, -1)
val zlayer = ZNarrow[Float](1, 2, -1, inputShape = Shape(3))
zlayer.build(Shape(-1, 3))
zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 1))
val input = Tensor[Float](Array(2, 3)).rand()
compareOutputAndGradInput(blayer, zlayer, input)
}

"Narrow Zoo 3D" should "be the same as BigDL" in {
val blayer = BNarrow[Float](2, 2)
val zlayer = ZNarrow[Float](1, 1, inputShape = Shape(5, 6))
zlayer.build(Shape(-1, 5, 6))
zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 1, 6))
val input = Tensor[Float](Array(4, 5, 6)).rand()
compareOutputAndGradInput(blayer, zlayer, input)
}

"Narrow Zoo 3D with negative length" should "be the same as BigDL" in {
val blayer = BNarrow[Float](3, 4, -1)
val zlayer = ZNarrow[Float](2, 3, -1, inputShape = Shape(5, 6))
zlayer.build(Shape(-1, 5, 6))
zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 5, 3))
val input = Tensor[Float](Array(4, 5, 6)).rand()
compareOutputAndGradInput(blayer, zlayer, input)
}

"Narrow Zoo 4D" should "be the same as BigDL" in {
val blayer = BNarrow[Float](2, 3, 3)
val zlayer = ZNarrow[Float](1, 2, 3, inputShape = Shape(8, 5, 6))
zlayer.build(Shape(-1, 8, 5, 6))
zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 3, 5, 6))
val input = Tensor[Float](Array(2, 8, 5, 6)).rand()
compareOutputAndGradInput(blayer, zlayer, input)
}

"Narrow Zoo 4D with negative length" should "be the same as BigDL" in {
val blayer = BNarrow[Float](-1, 4, -2)
val zlayer = ZNarrow[Float](-1, 3, -2, inputShape = Shape(5, 6, 7))
zlayer.build(Shape(-1, 5, 6, 7))
zlayer.getOutputShape().toSingle().toArray should be (Array(-1, 5, 6, 3))
val input = Tensor[Float](Array(2, 5, 6, 7)).rand()
compareOutputAndGradInput(blayer, zlayer, input)
}

"Narrow the batch dimension" should "raise an exception" in {
intercept[RuntimeException] {
val zlayer = ZNarrow[Float](0, 0, inputShape = Shape(2, 3, 4))
zlayer.build(Shape(-1, 2, 3, 4))
}
}

"Narrow offset too large" should "raise an exception" in {
intercept[RuntimeException] {
val zlayer = ZNarrow[Float](1, 2, inputShape = Shape(2, 3, 4))
zlayer.build(Shape(-1, 2, 3, 4))
}
}

"Narrow length too large" should "raise an exception" in {
intercept[RuntimeException] {
val zlayer = ZNarrow[Float](1, 1, 2, inputShape = Shape(2, 3, 4))
zlayer.build(Shape(-1, 2, 3, 4))
}
}

}
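
The parameter pairs in these cases (for example BNarrow(2, 3, -1) against ZNarrow(1, 2, -1)) indicate that the Zoo wrapper takes 0-based dim and offset indices, with dimension 0 being the batch, which it refuses to narrow. A standalone sketch under that reading, reusing the 4D configuration above; the object wrapper and the direct forward call are assumptions.

import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.Shape
import com.intel.analytics.zoo.pipeline.api.keras.layers.extra.Narrow

object NarrowIndexingSketch {
  def main(args: Array[String]): Unit = {
    // 0-based (dim, offset, length); dim 0 is the batch dimension and cannot be narrowed.
    val narrow = Narrow[Float](1, 2, 3, inputShape = Shape(8, 5, 6))
    narrow.build(Shape(-1, 8, 5, 6))
    val out = narrow.forward(Tensor[Float](Array(2, 8, 5, 6)).rand()).toTensor[Float]
    println(out.size().mkString("x"))  // 2x3x5x6: 3 slices along dim 1, starting at offset 2
  }
}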