From bbce12e82f9dac6b74fe035bd9fca4c43c00c376 Mon Sep 17 00:00:00 2001 From: Andrew Or Date: Wed, 27 May 2015 13:18:46 -0700 Subject: [PATCH] Fix manual things that cannot be covered through automation --- .../org/apache/spark/storage/BlockManagerSuite.scala | 8 ++++---- .../scala/org/apache/spark/sql/MathExpressionsSuite.scala | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala index 151955ef7f435..8a4efb937f35e 100644 --- a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala +++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala @@ -87,7 +87,7 @@ class BlockManagerSuite extends FunSuite with Matchers with BeforeAndAfterEach new BlockManagerMasterEndpoint(rpcEnv, true, conf, new LiveListenerBus)), conf, true) val initialize = PrivateMethod[Unit]('initialize) - SizeEstimator invokePrivate initialize() + org.apache.spark.util.SizeEstimator invokePrivate initialize() } override def afterEach(): Unit = { @@ -420,8 +420,8 @@ class BlockManagerSuite extends FunSuite with Matchers with BeforeAndAfterEach store = makeBlockManager(12000) val list1 = List(new Array[Byte](2000), new Array[Byte](2000)) val list2 = List(new Array[Byte](500), new Array[Byte](1000), new Array[Byte](1500)) - val list1SizeEstimate = SizeEstimator.estimate(list1.iterator.toArray) - val list2SizeEstimate = SizeEstimator.estimate(list2.iterator.toArray) + val list1SizeEstimate = org.apache.spark.util.SizeEstimator.estimate(list1.iterator.toArray) + val list2SizeEstimate = org.apache.spark.util.SizeEstimator.estimate(list2.iterator.toArray) store.putIterator("list1", list1.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true) store.putIterator("list2memory", list2.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true) store.putIterator("list2disk", list2.iterator, StorageLevel.DISK_ONLY, tellMaster = true) @@ -674,7 
+674,7 @@ class BlockManagerSuite extends FunSuite with Matchers with BeforeAndAfterEach store.putIterator("list3", list3.iterator, StorageLevel.DISK_ONLY, tellMaster = true) val listForSizeEstimate = new ArrayBuffer[Any] listForSizeEstimate ++= list1.iterator - val listSize = SizeEstimator.estimate(listForSizeEstimate) + val listSize = org.apache.spark.util.SizeEstimator.estimate(listForSizeEstimate) // At this point LRU should not kick in because list3 is only on disk assert(store.get("list1").isDefined, "list1 was not in store") assert(store.get("list1").get.data.size === 2) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/MathExpressionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/MathExpressionsSuite.scala index c4281c4b55c02..dd68965444f5d 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/MathExpressionsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/MathExpressionsSuite.scala @@ -206,7 +206,7 @@ class MathExpressionsSuite extends QueryTest { } test("log") { - testOneToOneNonNegativeMathFunction(log, math.log) + testOneToOneNonNegativeMathFunction(org.apache.spark.sql.functions.log, math.log) } test("log10") {