Commit 06c8ffa

style
squito committed Aug 19, 2015
1 parent 9d56e1b commit 06c8ffa
Showing 4 changed files with 13 additions and 8 deletions.
@@ -48,7 +48,10 @@ private[spark] class ExternalBlockStore(blockManager: BlockManager, executorId:
}
}

-override def putBytes(blockId: BlockId, bytes: LargeByteBuffer, level: StorageLevel): PutResult = {
+override def putBytes(
+    blockId: BlockId,
+    bytes: LargeByteBuffer,
+    level: StorageLevel): PutResult = {
putIntoExternalBlockStore(blockId, bytes, returnValues = true)
}
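A minimal caller-side sketch of the reformatted putBytes signature above. Only putBytes itself and LargeByteBufferHelper.asLargeByteBuffer appear in this commit; the package path for LargeByteBuffer/LargeByteBufferHelper is an assumption, and the rest is purely illustrative.

    // Sketch only: the network.buffer package is assumed, and ExternalBlockStore is
    // private[spark], so real code would have to live under org.apache.spark to compile.
    import org.apache.spark.network.buffer.{LargeByteBuffer, LargeByteBufferHelper}
    import org.apache.spark.storage.{ExternalBlockStore, PutResult, RDDBlockId, StorageLevel}

    def cacheToExternalStore(store: ExternalBlockStore, payload: Array[Byte]): PutResult = {
      // Wrap a plain byte array the same way the Tachyon read path in this commit does.
      val bytes: LargeByteBuffer = LargeByteBufferHelper.asLargeByteBuffer(payload)
      // OFF_HEAP is the storage level serviced by the external block store in Spark 1.x.
      store.putBytes(RDDBlockId(rddId = 0, splitIndex = 0), bytes, StorageLevel.OFF_HEAP)
    }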

@@ -137,7 +137,7 @@ private[spark] class TachyonBlockManager() extends ExternalBlockManager with Log
val is = file.getInStream(ReadType.CACHE)
try {
val size = file.length
-//TODO get tachyon to support large blocks
+// TODO get tachyon to support large blocks
val bs = new Array[Byte](size.asInstanceOf[Int])
ByteStreams.readFully(is, bs)
Some(LargeByteBufferHelper.asLargeByteBuffer(bs))
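The read path shown here still pulls the whole block into a single Array[Byte] via size.asInstanceOf[Int], which is why the TODO remains: array indices are Ints, so anything past roughly 2 GB cannot fit, and a larger size would be silently truncated by the cast. A standalone illustration in plain Scala (no Tachyon involved):

    object IntCastLimit {
      def main(args: Array[String]): Unit = {
        val size: Long = 3L * 1000 * 1000 * 1000    // a hypothetical 3 GB block length
        println(Int.MaxValue)                       // 2147483647, the largest possible array index
        println(size.asInstanceOf[Int])             // negative, silently truncated value
        // new Array[Byte](size.asInstanceOf[Int])  // would throw NegativeArraySizeException
      }
    }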
core/src/test/scala/org/apache/spark/ShuffleSuite.scala (4 changes: 2 additions & 2 deletions)
@@ -288,7 +288,7 @@ abstract class ShuffleSuite extends SparkFunSuite with Matchers with LocalSparkC
val rdd = sc.parallelize(1 to 1e6.toInt, 1).map{ i =>
val n = 3e3.toInt
val arr = new Array[Byte](n)
-//need to make sure the array doesn't compress to something small
+// need to make sure the array doesn't compress to something small
scala.util.Random.nextBytes(arr)
(i, arr)
}
@@ -304,7 +304,7 @@ abstract class ShuffleSuite extends SparkFunSuite with Matchers with LocalSparkC
val rdd = sc.parallelize(1 to 1e6.toInt, 1).map{ i =>
val n = 3e3.toInt
val arr = new Array[Byte](n)
-//need to make sure the array doesn't compress to something small
+// need to make sure the array doesn't compress to something small
scala.util.Random.nextBytes(arr)
(2 * i, arr)
}
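Both test hunks above rely on the same trick called out in the reformatted comments: roughly 1e6 records of 3e3 random bytes each, about 3 GB of shuffle data, filled via Random.nextBytes precisely so compression cannot shrink it back under the 2 GB mark. A small self-contained check of that assumption (java.util.zip used only for illustration, not by the tests themselves):

    import java.util.zip.Deflater

    object CompressibilityCheck {
      def main(args: Array[String]): Unit = {
        def deflatedSize(input: Array[Byte]): Int = {
          val deflater = new Deflater()
          deflater.setInput(input)
          deflater.finish()
          val out = new Array[Byte](input.length * 2)  // ample room even for incompressible input
          val written = deflater.deflate(out)
          deflater.end()
          written
        }

        val zeros = new Array[Byte](3000)              // all-zero 3 KB record
        val random = new Array[Byte](3000)
        scala.util.Random.nextBytes(random)            // same filler the tests use

        println(s"zeros deflate to  ${deflatedSize(zeros)} bytes")   // a handful of bytes
        println(s"random deflate to ${deflatedSize(random)} bytes")  // roughly the original 3000
      }
    }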
@@ -18,13 +18,15 @@ package org.apache.spark.rdd

import org.apache.spark._
import org.apache.spark.storage.{ReplicationBlockSizeLimitException, StorageLevel}
-import org.scalatest.{Matchers, FunSuite}
+import org.scalatest.Matchers

-class LargePartitionCachingSuite extends FunSuite with SharedSparkContext with Matchers {
+class LargePartitionCachingSuite extends SparkFunSuite with SharedSparkContext with Matchers {

-def largePartitionRdd = sc.parallelize(1 to 1e6.toInt, 1).map{i => new Array[Byte](2.2e3.toInt)}
+def largePartitionRdd: RDD[Array[Byte]] = {
+  sc.parallelize(1 to 1e6.toInt, 1).map{i => new Array[Byte](2.2e3.toInt)}
+}

-//just don't want to kill the test server
+// just don't want to kill the test server
ignore("memory serialized cache large partitions") {
largePartitionRdd.persist(StorageLevel.MEMORY_ONLY_SER).count() should be (1e6.toInt)
}
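The ignored test above caches a single partition of about 1e6 elements of 2.2e3 bytes each, roughly 2.2 GB of raw data, just past the ~2 GB ceiling of a conventional ByteBuffer-backed cached block; it stays ignored so it does not exhaust memory on the test server. The arithmetic as a quick standalone check:

    object LargePartitionSizeCheck {
      def main(args: Array[String]): Unit = {
        val elements = 1e6.toLong                       // records in the single partition
        val bytesPerElement = 2.2e3.toLong              // Array[Byte](2.2e3.toInt) per record
        val approxBytes = elements * bytesPerElement    // ignores per-object overhead
        println(s"approx raw partition size: $approxBytes bytes")                       // 2200000000
        println(s"exceeds Int.MaxValue (${Int.MaxValue}): ${approxBytes > Int.MaxValue}")  // true
      }
    }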
