
Commit

Optimizers: use member variable in parent class
Menooker committed May 24, 2019
1 parent e7ec66e commit 7e47204
Showing 3 changed files with 14 additions and 13 deletions.
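All three files make the same kind of change: inside each optimizer, references to the captured constructor argument (`_model`, or a parameter that shadowed the inherited name outright) are replaced by the `model`/`dataset`/`criterion` members defined in the parent Optimizer class, so that anything reassigned on the parent after construction is actually observed. Below is a minimal, self-contained sketch of both situations using toy types; the String model and the setModel setter are stand-ins for illustration, not the real BigDL API.

// Toy parent: holds a mutable model member, as the real Optimizer does.
abstract class Optimizer(protected var model: String) {
  // Hypothetical setter standing in for however the member gets replaced later.
  def setModel(newModel: String): this.type = { this.model = newModel; this }
  def currentModel(): String
}

// A parameter named `model` shadows the inherited member throughout the body,
// so the value read here is always the original constructor argument.
class ShadowingOptimizer(model: String) extends Optimizer(model) {
  def currentModel(): String = model
}

// With the parameter renamed to `_model`, an unqualified `model` resolves to
// the parent's member and reflects its current value.
class FixedOptimizer(_model: String) extends Optimizer(_model) {
  def currentModel(): String = model
}

object MemberVsArgumentDemo extends App {
  println(new ShadowingOptimizer("lenet").setModel("resnet").currentModel()) // lenet
  println(new FixedOptimizer("lenet").setModel("resnet").currentModel())     // resnet
}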
@@ -807,7 +807,7 @@ class DistriOptimizer[T: ClassTag] (
state("warmupIterationNum") = warmupIterationNum
state("computeThresholdbatchSize") = computeThresholdbatchSize
state("maxDropPercentage") = maxDropPercentage
state("isLayerwiseScaled") = Utils.isLayerwiseScaled(_model)
state("isLayerwiseScaled") = Utils.isLayerwiseScaled(model)

val nodeNumber = Engine.nodeNumber()
val coresPerNode = Engine.coreNumber()
@@ -38,17 +38,17 @@ object LocalOptimizer {
/**
* Optimize a model on a single machine
*
- * @param model model to be optimized
- * @param dataset data set
- * @param criterion criterion to be used
+ * @param _model model to be optimized
+ * @param _dataset data set
+ * @param _criterion criterion to be used
*/
class LocalOptimizer[T: ClassTag] (
- model: Module[T],
- dataset: LocalDataSet[MiniBatch[T]],
- criterion: Criterion[T]
+ _model: Module[T],
+ _dataset: LocalDataSet[MiniBatch[T]],
+ _criterion: Criterion[T]
)(implicit ev: TensorNumeric[T])
extends Optimizer[T, MiniBatch[T]](
- model, dataset, criterion) {
+ _model, _dataset, _criterion) {

import LocalOptimizer._
import Optimizer.{header, saveModel, saveState, checkSubModules, getHyperParameterLog}
@@ -114,8 +114,9 @@ class LocalOptimizer[T: ClassTag] (
state("isLayerwiseScaled") = Utils.isLayerwiseScaled(model)

dataset.shuffle()
- val numSamples = dataset.data(train = false).map(_.size()).reduce(_ + _)
- var iter = dataset.data(train = true)
+ val _dataset = dataset.asInstanceOf[LocalDataSet[MiniBatch[T]]]
+ val numSamples = _dataset.data(train = false).map(_.size()).reduce(_ + _)
+ var iter = _dataset.data(train = true)
logger.info("model thread pool size is " + Engine.model.getPoolSize)
while (!endWhen(state)) {
val start = System.nanoTime()
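With the rename in place, `dataset` inside optimize() now refers to the member inherited from Optimizer, which is declared with the general dataset type rather than LocalDataSet, so the hunk above recovers the local view with an explicit downcast before pulling the in-memory iterators. A minimal sketch of that pattern, with simplified assumed types rather than the real BigDL ones:

trait DataSet[D] { def size(): Long }

// Only the local flavour exposes an in-memory iterator over mini-batches.
trait LocalDataSet[D] extends DataSet[D] {
  def data(train: Boolean): Iterator[D]
}

abstract class Optimizer[D](protected var dataset: DataSet[D]) {
  def optimize(): Unit
}

class LocalOptimizer[D](_dataset: LocalDataSet[D]) extends Optimizer[D](_dataset) {
  def optimize(): Unit = {
    // The inherited member is only known to be a DataSet[D]; cast back to the
    // local variant to reach data(train) for counting samples and iterating.
    val local = dataset.asInstanceOf[LocalDataSet[D]]
    val numSamples = local.data(train = false).size
    var iter = local.data(train = true)
    println(s"training on $numSamples samples; first batch present: ${iter.hasNext}")
  }
}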
@@ -646,7 +646,7 @@ class ParallelOptimizer[T: ClassTag] (
asInstanceOf[Container[_, _, T]].modules,
optimMethodMap(this.model.getName), optimMethodMap)
} else {
- require(optimMethodMap.contains(this._model.getName),
+ require(optimMethodMap.contains(this.model.getName),
"single layer model should have optim method set")
}

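The hunk above sits in ParallelOptimizer's per-module optim-method handling: a container model distributes its methods over sub-modules, while a single-layer model must have an entry keyed by its own (current) name, which is why the lookup now goes through this.model. A rough sketch of that guard with toy module types, not the BigDL class hierarchy:

object OptimMethodCheck {
  sealed trait Module { def getName: String }
  case class Layer(getName: String) extends Module
  case class Container(getName: String, modules: Seq[Module]) extends Module

  // optimMethodMap is simplified to Map[String, String]; the real values would
  // be per-module optimization methods keyed by module name.
  def checkOptimMethods(model: Module, optimMethodMap: Map[String, String]): Unit =
    model match {
      case Container(_, modules) =>
        modules.foreach(checkOptimMethods(_, optimMethodMap))
      case layer: Layer =>
        require(optimMethodMap.contains(layer.getName),
          "single layer model should have optim method set")
    }
}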
@@ -675,7 +675,7 @@

private def defaultPrioritize(): mutable.HashMap[String, Int] = {
val priorities = new mutable.HashMap[String, Int]
- val orders = ParallelOptimizer.getExecutionOrder(this._model)
+ val orders = ParallelOptimizer.getExecutionOrder(this.model)
val len = orders.size
orders.zipWithIndex.foreach(order => {
priorities.put(order._1.getName, len - order._2)
@@ -709,7 +709,7 @@
state("warmupIterationNum") = warmupIterationNum
state("computeThresholdbatchSize") = computeThresholdbatchSize
state("maxDropPercentage") = maxDropPercentage
state("isLayerwiseScaled") = Utils.isLayerwiseScaled(_model)
state("isLayerwiseScaled") = Utils.isLayerwiseScaled(model)

val nodeNumber = Engine.nodeNumber()
val coresPerNode = Engine.coreNumber()
