fix scala style check and disable header check (intel-analytics#4715)
Le-Zheng authored Sep 13, 2021
1 parent cdc0a22 commit 31b12ed
Showing 42 changed files with 537 additions and 519 deletions.
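In substance the commit is mechanical: it reformats Scala sources (much of them commented-out code) so they pass the project's scalastyle checks, chiefly by adding a space after the `//` comment marker and wrapping lines that exceed the length limit, and it switches off the license-header check. The repository's scalastyle-config.xml is not part of this diff, so the sketch below only illustrates the kind of configuration involved; the checker class names come from the scalastyle project, but the levels and parameter values are assumptions:

```xml
<!-- Hypothetical scalastyle-config.xml excerpt; not taken from this commit. -->
<scalastyle>
  <!-- Require a space after the comment marker: "// text", not "//text". -->
  <check level="error" class="org.scalastyle.scalariform.SpaceAfterCommentStartChecker"
         enabled="true"/>
  <!-- Cap line length; the wrapped scaladoc links and type annotations in the
       hunks below are responses to a limit like this (100 is an assumed value). -->
  <check level="error" class="org.scalastyle.file.FileLineLengthChecker" enabled="true">
    <parameters>
      <parameter name="maxLineLength">100</parameter>
    </parameters>
  </check>
  <!-- "disable header check": stop comparing each file against a license header. -->
  <check level="error" class="org.scalastyle.file.HeaderMatchesChecker" enabled="false"/>
</scalastyle>
```

With the scalastyle-maven-plugin, `mvn scalastyle:check` fails the build on any violation, which is presumably the check this commit makes pass.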
@@ -26,7 +26,7 @@ import com.intel.analytics.bigdl.dllib.{nn => bnn}
import com.intel.analytics.bigdl.dllib.keras.layers._
import com.intel.analytics.bigdl.dllib.keras.layers.internal._
import com.intel.analytics.bigdl.dllib.keras.models._
-//import com.intel.analytics.bigdl.dllib.keras.layers.TimeDistributed
+// import com.intel.analytics.bigdl.dllib.keras.layers.TimeDistributed

import scala.reflect.ClassTag

@@ -19,7 +19,7 @@ package com.intel.analytics.bigdl.dllib.common
import com.intel.analytics.bigdl.mkl.{MKL => BMKL}
import com.intel.analytics.bigdl.dllib.tensor.{DoubleType, FloatType, Tensor}
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
-//import com.intel.analytics.zoo.mkl.MKL.{vdErf, vsErf}
+// import com.intel.analytics.zoo.mkl.MKL.{vdErf, vsErf}
import org.apache.log4j.Logger

import scala.reflect.ClassTag
@@ -32,7 +32,7 @@ import com.intel.analytics.bigdl.dllib.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.dllib.optim.{LocalPredictor, ValidationMethod, _}
import com.intel.analytics.bigdl.dllib.feature.image.ImageSet
import com.intel.analytics.bigdl.dllib.feature.text.TextSet
-//import com.intel.analytics.zoo.pipeline.api.net.TFNet
+// import com.intel.analytics.zoo.pipeline.api.net.TFNet

import scala.collection.JavaConverters._
import scala.reflect.ClassTag
@@ -24,15 +24,15 @@ import com.intel.analytics.bigdl.DataSet
import com.intel.analytics.bigdl.dllib.feature.dataset.{AbstractDataSet, DistributedDataSet, MiniBatch, Transformer}
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.utils.RandomGenerator
-//import com.intel.analytics.bigdl.dllib.utils.PythonInterpreter
+// import com.intel.analytics.bigdl.dllib.utils.PythonInterpreter
import com.intel.analytics.bigdl.dllib.feature.common.{ArrayLike, ArrayLikeWrapper}
-//import com.intel.analytics.bigdl.dllib.feature.pmem._
+// import com.intel.analytics.bigdl.dllib.feature.pmem._
import com.intel.analytics.bigdl.dllib.utils.Engine
import org.apache.spark.{SparkContext, TaskContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import org.slf4j.{Logger, LoggerFactory}
-//import jep._
+// import jep._

import scala.reflect.ClassTag
import scala.collection.JavaConverters._
@@ -331,7 +331,7 @@ class CachedDistributedFeatureSet[T: ClassTag]
}
}
//
-//object PythonFeatureSet{
+// object PythonFeatureSet{
// // One partition one loader
// protected def getLocalLoader(loaderName: String): String = {
// s"${loaderName}_${TaskContext.getPartitionId()}"
@@ -419,9 +419,9 @@ class CachedDistributedFeatureSet[T: ClassTag]
// Tensor[Float]()
// }
// }
-//}
+// }
//
-//class PythonFeatureSet[T: ClassTag](
+// class PythonFeatureSet[T: ClassTag](
// dataset: Array[Byte],
// getLoader: (Int, Int, String) => String,
// getIterator: (String, String, Boolean) => String,
@@ -543,7 +543,7 @@ class CachedDistributedFeatureSet[T: ClassTag]
// override def toDistributed(): DistributedDataSet[T] = {
// new DistributedDataSetWrapper[T](this)
// }
-//}
+// }

/**
* Wrap a RDD as a FeatureSet. RDD will be persist on local disk, and will load
@@ -101,9 +101,11 @@ object MTSampleToMiniBatch {
* @param batchSize total batch size
* @param transformer transformer who rawData to Sample
* @param featurePaddingParam feature padding strategy, see
-[[com.intel.analytics.bigdl.dllib.feature.dataset.PaddingParam]] for details.
+[[com.intel.analytics.bigdl.dllib.feature.dataset.PaddingParam]]
+for details.
* @param labelPaddingParam label padding strategy, see
-[[com.intel.analytics.bigdl.dllib.feature.dataset.PaddingParam]] for details.
+[[com.intel.analytics.bigdl.dllib.feature.dataset.PaddingParam]]
+for details.
* @return
*/
def apply[A: ClassTag, T: ClassTag](
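The scaladoc re-wrapped above documents `MTSampleToMiniBatch.apply`, whose `transformer` converts raw records into `Sample`s before batching. A hypothetical usage sketch, inferred only from the documented parameter list (the `Option` wrapping of the padding parameters, the argument order, and the transformer's exact type are assumptions, not confirmed by this diff):

```scala
import com.intel.analytics.bigdl.dllib.feature.dataset.{PaddingParam, Sample, Transformer}
import scala.reflect.ClassTag

// MTSampleToMiniBatch is assumed to be in scope; its package is not shown here.
// `rawToSample` is a placeholder for any Transformer turning records of type A
// into Sample[Float].
def batcher[A: ClassTag](rawToSample: Transformer[A, Sample[Float]]) =
  MTSampleToMiniBatch[A, Float](
    32,                          // total batch size
    rawToSample,                 // transformer: raw data -> Sample
    Some(PaddingParam[Float]()), // feature padding strategy (assumed Option)
    Some(PaddingParam[Float]())  // label padding strategy (assumed Option)
  )
```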
@@ -53,9 +53,11 @@ trait Preprocessing[A, B] extends Transformer[A, B] {

def apply(imageSet: ImageSet): ImageSet = {
this match {
-case xs: com.intel.analytics.bigdl.dllib.feature.common.Preprocessing[com.intel.analytics.bigdl.dllib.feature.transform.vision.image.ImageFeature,com.intel.analytics.bigdl.dllib.feature.transform.vision.image.ImageFeature] =>
-imageSet.transform(this.asInstanceOf[Preprocessing[ImageFeature, ImageFeature]])
-case _ => throw new IllegalArgumentException("We expect " +
+case xs: com.intel.analytics.bigdl.dllib.feature.common.Preprocessing[com.intel.analytics.
+bigdl.dllib.feature.transform.vision.image.ImageFeature, com.intel.analytics.bigdl.dllib.
+feature.transform.vision.image.ImageFeature] =>
+imageSet.transform(this.asInstanceOf[Preprocessing[ImageFeature, ImageFeature]])
+case _ => throw new IllegalArgumentException("We expect " +
"Preprocessing[ImageFeature, ImageFeature] here")
}
// if (this.isInstanceOf[Preprocessing[ImageFeature, ImageFeature]]) {
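An aside on the `match` re-wrapped above: the type arguments in `case xs: ...Preprocessing[ImageFeature, ImageFeature]` are erased at runtime on the JVM, so the pattern effectively tests only the raw `Preprocessing` type, which is why the branch still needs its explicit `asInstanceOf` cast. A minimal standalone illustration of that behavior (generic example, not repository code):

```scala
object ErasureDemo extends App {
  val xs: Any = List("a", "b")
  // Type arguments are erased: this pattern matches ANY List, so the first
  // branch is taken even though the elements are Strings. The compiler emits
  // an "unchecked" warning for exactly this reason.
  xs match {
    case is: List[Int] => println(s"matched the List[Int] pattern: $is")
    case _             => println("no match")
  }
}
```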
@@ -1,4 +1,4 @@
-///*
+// /*
// * Copyright 2018 Analytics Zoo Authors.
// *
// * Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,26 +14,27 @@
// * limitations under the License.
// */
//
-//package com.intel.analytics.bigdl.dllib.feature.pmem
+// package com.intel.analytics.bigdl.dllib.feature.pmem
//
-//import com.intel.analytics.bigdl.dllib.feature.dataset.{ByteRecord, Sample}
-//import com.intel.analytics.bigdl.dllib.feature.transform.vision.image.ImageFeature
-//import com.intel.analytics.bigdl.dllib.feature.{CachedDistributedFeatureSet, DistributedFeatureSet}
-//import com.intel.analytics.bigdl.dllib.feature.common.ArrayLike
-//import org.apache.spark.rdd.RDD
+// import com.intel.analytics.bigdl.dllib.feature.dataset.{ByteRecord, Sample}
+// import com.intel.analytics.bigdl.dllib.feature.transform.vision.image.ImageFeature
+// import com.intel.analytics.bigdl.dllib.feature.{CachedDistributedFeatureSet,
+// DistributedFeatureSet}
+// import com.intel.analytics.bigdl.dllib.feature.common.ArrayLike
+// import org.apache.spark.rdd.RDD
//
-//import scala.reflect.ClassTag
+// import scala.reflect.ClassTag
//
-//private[bigdl] abstract class NativeArrayConverter[T: ClassTag]
+// private[bigdl] abstract class NativeArrayConverter[T: ClassTag]
// extends Serializable {
//
// def getBytesPerRecord(record: T): Long
//
// def toArray(recordIterator: Iterator[T],
// countPerPartition: Iterator[(Int, Long)]): Iterator[ArrayLike[T]]
-//}
+// }
//
-//private[bigdl] class ByteRecordConverter(
+// private[bigdl] class ByteRecordConverter(
// memoryType: MemoryType = PMEM) extends NativeArrayConverter[ByteRecord] {
//
// override def getBytesPerRecord(byteRecord: ByteRecord): Long = {
@@ -55,9 +56,9 @@
// }
// Iterator.single(ByteRecordArray(nativeArray, labels))
// }
-//}
+// }
//
-//private[bigdl] case class ByteRecordArray(records: VarLenBytesArray,
+// private[bigdl] case class ByteRecordArray(records: VarLenBytesArray,
// label: Array[Float]) extends ArrayLike[ByteRecord] {
// override def length: Int = {
// records.recordNum
@@ -68,9 +69,9 @@
// override def free(): Unit = {
// records.free()
// }
-//}
+// }
//
-//private[bigdl] class SampleConverter(
+// private[bigdl] class SampleConverter(
// memoryType: MemoryType = PMEM) extends NativeArrayConverter[Sample[Float]] {
//
// override def getBytesPerRecord(sample: Sample[Float]): Long = {
@@ -96,9 +97,9 @@
// }
// Iterator.single(SampleArray(nativeArray, featureSizes, labelSizes))
// }
-//}
+// }
//
-//private[bigdl] case class SampleArray(
+// private[bigdl] case class SampleArray(
// samples: VarLenFloatsArray,
// featureSizes: Array[Array[Array[Int]]],
// labelSizes: Array[Array[Array[Int]]]) extends ArrayLike[Sample[Float]] {
@@ -113,9 +114,9 @@
// override def free(): Unit = {
// samples.free()
// }
-//}
+// }
//
-//private[bigdl] class ImageFeatureConverter(
+// private[bigdl] class ImageFeatureConverter(
// memoryType: MemoryType = PMEM) extends NativeArrayConverter[ImageFeature] {
//
// override def getBytesPerRecord(imageFeature: ImageFeature): Long = {
@@ -145,14 +146,14 @@
// }
// Iterator.single(ImageFeatureArray(nativeArray, metrics))
// }
-//}
+// }
//
-///**
+// /**
// * Cached ImageFeatures in PMEM.
// * @param bytesData bytes in PMEM.
// * @param metrics ImageFeature without bytes, just some metrics.
// */
-//private[bigdl] case class ImageFeatureArray(
+// private[bigdl] case class ImageFeatureArray(
// bytesData: VarLenBytesArray,
// metrics: Array[ImageFeature]) extends ArrayLike[ImageFeature] {
// override def length: Int = {
@@ -166,9 +167,9 @@
// override def free(): Unit = {
// bytesData.free()
// }
-//}
+// }
//
-//object PmemFeatureSet {
+// object PmemFeatureSet {
//
// private def rdd[T: ClassTag](data: RDD[T],
// nativeArrayConverter: NativeArrayConverter[T],
@@ -221,4 +222,5 @@
// s"${implicitly[ClassTag[T]].runtimeClass} is not supported for now")
// }
// }
-//}
+// }
+// }
@@ -1,4 +1,4 @@
-///*
+// /*
// * Copyright 2018 Analytics Zoo Authors.
// *
// * Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,11 +14,11 @@
// * limitations under the License.
// */
//
-//package com.intel.analytics.bigdl.dllib.feature.pmem
+// package com.intel.analytics.bigdl.dllib.feature.pmem
//
-//import org.apache.spark.unsafe.Platform
+// import org.apache.spark.unsafe.Platform
//
-//object FloatArray {
+// object FloatArray {
// def apply(iterator: Iterator[Float], numOfRecord: Int,
// memoryType: MemoryType = PMEM): FloatArray = {
// val nativeArray = new FloatArray(numOfRecord, memoryType = memoryType)
@@ -29,13 +29,13 @@
// }
// nativeArray
// }
-//}
+// }
//
-///**
+// /**
// * An float array with fixed size stored in native memory.
// * @param recordNum number of item for this array.
// */
-//class FloatArray(val recordNum: Int,
+// class FloatArray(val recordNum: Int,
// sizeOfRecordByBytes: Int = 4,
// memoryType: MemoryType = PMEM) extends NativeArray[Float](
// recordNum * sizeOfRecordByBytes, memoryType) {
@@ -55,9 +55,9 @@
// assert(index <= lastOffSet)
// index
// }
-//}
+// }
//
-//object VarLenFloatsArray {
+// object VarLenFloatsArray {
// // Backward compatible with Spark.6
// val FLOAT_ARRAY_OFFSET = {
// var unsafe: sun.misc.Unsafe = null
@@ -78,10 +78,10 @@
// 0
// }
// }
-//}
+// }
//
//
-//class VarLenFloatsArray(recordNum: Int, totalSizeByBytes: Long,
+// class VarLenFloatsArray(recordNum: Int, totalSizeByBytes: Long,
// memoryType: MemoryType = PMEM) extends NativeVarLenArray[Float](recordNum,
// totalSizeByBytes, memoryType, 2) {
//
@@ -90,4 +90,4 @@
// }
//
// override def getTypeOffSet(): Int = VarLenFloatsArray.FLOAT_ARRAY_OFFSET
-//}
+// }
@@ -1,4 +1,4 @@
-///*
+// /*
// * Copyright 2018 Analytics Zoo Authors.
// *
// * Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,27 +14,27 @@
// * limitations under the License.
// */
//
-//package com.intel.analytics.bigdl.dllib.feature.pmem
+// package com.intel.analytics.bigdl.dllib.feature.pmem
//
-//import scala.collection.mutable.ArrayBuffer
+// import scala.collection.mutable.ArrayBuffer
//
-//sealed trait MemoryType extends Serializable
+// sealed trait MemoryType extends Serializable
//
-//case object PMEM extends MemoryType
+// case object PMEM extends MemoryType
//
-//case object DRAM extends MemoryType
+// case object DRAM extends MemoryType
//
-//case object DIRECT extends MemoryType
+// case object DIRECT extends MemoryType
//
-//case class DISK_AND_DRAM(numSlice: Int) extends MemoryType
+// case class DISK_AND_DRAM(numSlice: Int) extends MemoryType
//
-//sealed trait DataStrategy
+// sealed trait DataStrategy
//
-//case object PARTITIONED extends DataStrategy
+// case object PARTITIONED extends DataStrategy
//
-//case object REPLICATED extends DataStrategy
+// case object REPLICATED extends DataStrategy
//
-//object MemoryType {
+// object MemoryType {
// def fromString(str: String): MemoryType = {
// val diskPattern = "DISK_(\\d+)".r
// str.toUpperCase() match {
@@ -47,21 +47,21 @@
// s"excepted PMEM, DRAM, DIRECT or DISK_n.")
// }
// }
-//}
+// }
//
-//object NativeArray {
+// object NativeArray {
// private val natives = new ArrayBuffer[NativeArray[_]]()
//
// def free(): Unit = {
// NativeArray.natives.map{_.free()}
// }
-//}
+// }
//
-///**
+// /**
// *
// * @param totalBytes
// */
-//abstract class NativeArray[T](totalBytes: Long, memoryType: MemoryType) {
+// abstract class NativeArray[T](totalBytes: Long, memoryType: MemoryType) {
//
// assert(totalBytes > 0, s"The size of bytes should be larger than 0, but got: ${totalBytes}!")
//
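Although it stays commented out, the `MemoryType.fromString` split across the two hunks above encodes a small naming scheme for memory types. A standalone re-typing of how it would presumably behave if re-enabled (the match branches hidden by the collapsed context are assumptions; only the regex, the `toUpperCase` dispatch, and the error message are visible in this diff):

```scala
object MemoryTypeSketch {
  sealed trait MemoryType
  case object PMEM extends MemoryType
  case object DRAM extends MemoryType
  case object DIRECT extends MemoryType
  case class DISK_AND_DRAM(numSlice: Int) extends MemoryType

  def fromString(str: String): MemoryType = {
    val diskPattern = "DISK_(\\d+)".r
    str.toUpperCase() match {
      case "PMEM"         => PMEM
      case "DRAM"         => DRAM
      case "DIRECT"       => DIRECT
      case diskPattern(n) => DISK_AND_DRAM(n.toInt) // e.g. "disk_4" -> DISK_AND_DRAM(4)
      case other          => throw new IllegalArgumentException(
        s"Unknown memory type: $other, excepted PMEM, DRAM, DIRECT or DISK_n.")
        // "excepted" (sic) preserved from the visible source line
    }
  }
}
```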
@@ -91,6 +91,6 @@
// }
//
// protected def indexOf(i: Int): Long
-//}
+// }
//
//
(The remaining changed files are not rendered in this view.)
