Skip to content

Commit

Permalink
Fix manual things that cannot be covered through automation
Browse files Browse the repository at this point in the history
  • Loading branch information
Andrew Or committed May 27, 2015
1 parent da0b12f commit bbce12e
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 5 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ class BlockManagerSuite extends FunSuite with Matchers with BeforeAndAfterEach
new BlockManagerMasterEndpoint(rpcEnv, true, conf, new LiveListenerBus)), conf, true)

val initialize = PrivateMethod[Unit]('initialize)
SizeEstimator invokePrivate initialize()
org.apache.spark.util.SizeEstimator invokePrivate initialize()
}

override def afterEach(): Unit = {
Expand Down Expand Up @@ -420,8 +420,8 @@ class BlockManagerSuite extends FunSuite with Matchers with BeforeAndAfterEach
store = makeBlockManager(12000)
val list1 = List(new Array[Byte](2000), new Array[Byte](2000))
val list2 = List(new Array[Byte](500), new Array[Byte](1000), new Array[Byte](1500))
val list1SizeEstimate = SizeEstimator.estimate(list1.iterator.toArray)
val list2SizeEstimate = SizeEstimator.estimate(list2.iterator.toArray)
val list1SizeEstimate = org.apache.spark.util.SizeEstimator.estimate(list1.iterator.toArray)
val list2SizeEstimate = org.apache.spark.util.SizeEstimator.estimate(list2.iterator.toArray)
store.putIterator("list1", list1.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
store.putIterator("list2memory", list2.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
store.putIterator("list2disk", list2.iterator, StorageLevel.DISK_ONLY, tellMaster = true)
Expand Down Expand Up @@ -674,7 +674,7 @@ class BlockManagerSuite extends FunSuite with Matchers with BeforeAndAfterEach
store.putIterator("list3", list3.iterator, StorageLevel.DISK_ONLY, tellMaster = true)
val listForSizeEstimate = new ArrayBuffer[Any]
listForSizeEstimate ++= list1.iterator
val listSize = SizeEstimator.estimate(listForSizeEstimate)
val listSize = org.apache.spark.util.SizeEstimator.estimate(listForSizeEstimate)
// At this point LRU should not kick in because list3 is only on disk
assert(store.get("list1").isDefined, "list1 was not in store")
assert(store.get("list1").get.data.size === 2)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -206,7 +206,7 @@ class MathExpressionsSuite extends QueryTest {
}

test("log") {
testOneToOneNonNegativeMathFunction(log, math.log)
testOneToOneNonNegativeMathFunction(org.apache.spark.sql.functions.log, math.log)
}

test("log10") {
Expand Down

0 comments on commit bbce12e

Please sign in to comment.