Make sure we always drop all tables etc. after each suite
Andrew Or committed Mar 24, 2016
1 parent 70530cf commit dbcdce6
Showing 15 changed files with 112 additions and 43 deletions.
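
The change applies one pattern across all of these suites: beforeAll() now calls super.beforeAll() before creating any fixtures, and afterAll() wraps its cleanup in try/finally so that super.afterAll() always runs, even when a DROP or delete throws. Without the finally, a single failed drop would skip the parent teardown and leak tables into every suite scheduled after it. A minimal sketch of the shape each suite ends up with (the suite name and fixture comments are illustrative, not lines from this diff):

package org.apache.spark.sql.hive

import org.scalatest.BeforeAndAfterAll

import org.apache.spark.SparkFunSuite

// Illustrative suite showing the setup/teardown pattern this commit applies.
class ExampleCleanupSuite extends SparkFunSuite with BeforeAndAfterAll {

  override def beforeAll(): Unit = {
    // Let parent traits initialize their state before fixtures are created.
    super.beforeAll()
    // ... create tables, temp tables, temp directories ...
  }

  override def afterAll(): Unit = {
    try {
      // ... drop tables, delete temp directories ...
    } finally {
      // Always runs, so parent teardown is never skipped by a failed drop.
      super.afterAll()
    }
  }
}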
@@ -60,16 +60,19 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
   }
 
   override def afterAll() {
-    TestHive.cacheTables = false
-    TimeZone.setDefault(originalTimeZone)
-    Locale.setDefault(originalLocale)
-    TestHive.setConf(SQLConf.COLUMN_BATCH_SIZE, originalColumnBatchSize)
-    TestHive.setConf(SQLConf.IN_MEMORY_PARTITION_PRUNING, originalInMemoryPartitionPruning)
-    TestHive.sessionState.functionRegistry.restore()
-
-    // For debugging dump some statistics about how much time was spent in various optimizer rules.
-    logWarning(RuleExecutor.dumpTimeSpent())
-    super.afterAll()
+    try {
+      TestHive.cacheTables = false
+      TimeZone.setDefault(originalTimeZone)
+      Locale.setDefault(originalLocale)
+      TestHive.setConf(SQLConf.COLUMN_BATCH_SIZE, originalColumnBatchSize)
+      TestHive.setConf(SQLConf.IN_MEMORY_PARTITION_PRUNING, originalInMemoryPartitionPruning)
+      TestHive.sessionState.functionRegistry.restore()
+
+      // For debugging dump some statistics about how much time was spent in various optimizer rules.
+      logWarning(RuleExecutor.dumpTimeSpent())
+    } finally {
+      super.afterAll()
+    }
   }
 
   /** A list of tests deemed out of scope currently and thus completely disregarded. */

@@ -59,10 +59,6 @@ object TestHive
         // SPARK-8910
         .set("spark.ui.enabled", "false")))
 
-trait TestHiveSingleton {
-  protected val sqlContext: SQLContext = TestHive
-  protected val hiveContext: TestHiveContext = TestHive
-}
 
 /**
  * A locally running test instance of Spark's Hive execution engine.

@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.hive.test
+
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.spark.SparkFunSuite
+import org.apache.spark.sql.SQLContext
+
+
+trait TestHiveSingleton extends SparkFunSuite with BeforeAndAfterAll {
+  protected val sqlContext: SQLContext = TestHive
+  protected val hiveContext: TestHiveContext = TestHive
+
+  protected override def afterAll(): Unit = {
+    try {
+      hiveContext.reset()
+    } finally {
+      super.afterAll()
+    }
+  }
+
+}
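
The TestHiveSingleton trait added above is what the suites below mix in: it hands them the shared TestHive context, and its afterAll() override guarantees hiveContext.reset() clears leftover tables and cached state once the suite finishes. A minimal sketch of a consumer, assuming the trait as committed here (the suite and table names are hypothetical):

package org.apache.spark.sql.hive

import org.apache.spark.sql.hive.test.TestHiveSingleton

// Hypothetical suite, for illustration only.
class ExampleHiveSuite extends TestHiveSingleton {
  test("state is cleaned up by the singleton, not by hand") {
    sqlContext.sql("CREATE TABLE example_t AS SELECT 1 AS id")
    assert(sqlContext.table("example_t").count() === 1)
    // No explicit DROP TABLE needed: TestHiveSingleton.afterAll()
    // calls hiveContext.reset(), which drops it.
  }
}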

@@ -26,6 +26,7 @@ class ExpressionToSQLSuite extends SQLBuilderTest with SQLTestUtils {
   import testImplicits._
 
   protected override def beforeAll(): Unit = {
+    super.beforeAll()
     sql("DROP TABLE IF EXISTS t0")
     sql("DROP TABLE IF EXISTS t1")
     sql("DROP TABLE IF EXISTS t2")
@@ -43,9 +44,13 @@ class ExpressionToSQLSuite extends SQLBuilderTest with SQLTestUtils {
   }
 
   override protected def afterAll(): Unit = {
-    sql("DROP TABLE IF EXISTS t0")
-    sql("DROP TABLE IF EXISTS t1")
-    sql("DROP TABLE IF EXISTS t2")
+    try {
+      sql("DROP TABLE IF EXISTS t0")
+      sql("DROP TABLE IF EXISTS t1")
+      sql("DROP TABLE IF EXISTS t2")
+    } finally {
+      super.afterAll()
+    }
   }
 
   private def checkSqlGeneration(hiveQl: String): Unit = {

@@ -33,12 +33,17 @@ class HiveDataFrameAnalyticsSuite extends QueryTest with TestHiveSingleton with BeforeAndAfterAll {
   private var testData: DataFrame = _
 
   override def beforeAll() {
+    super.beforeAll()
     testData = Seq((1, 2), (2, 2), (3, 4)).toDF("a", "b")
     hiveContext.registerDataFrameAsTable(testData, "mytable")
   }
 
   override def afterAll(): Unit = {
-    hiveContext.dropTempTable("mytable")
+    try {
+      hiveContext.dropTempTable("mytable")
+    } finally {
+      super.afterAll()
+    }
   }
 
   test("rollup") {

@@ -19,7 +19,6 @@ package org.apache.spark.sql.hive
 
 import java.io.File
 
-import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.{QueryTest, Row, SaveMode}
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.catalog.CatalogTableType
@@ -28,7 +27,7 @@ import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.{ExamplePointUDT, SQLTestUtils}
 import org.apache.spark.sql.types.{DecimalType, StringType, StructType}
 
-class HiveMetastoreCatalogSuite extends SparkFunSuite with TestHiveSingleton {
+class HiveMetastoreCatalogSuite extends TestHiveSingleton {
   import hiveContext.implicits._
 
   test("struct field should accept underscore in sub-column name") {
@@ -115,8 +114,7 @@ class DataSourceWithHiveMetastoreCatalogSuite
           .saveAsTable("t")
       }
 
-      val hiveTable =
-        sessionState.catalog.getTable(TableIdentifier("t", Some("default")))
+      val hiveTable = sessionState.catalog.getTable(TableIdentifier("t", Some("default")))
       assert(hiveTable.storage.inputFormat === Some(inputFormat))
       assert(hiveTable.storage.outputFormat === Some(outputFormat))
       assert(hiveTable.storage.serde === Some(serde))
@@ -146,8 +144,7 @@ class DataSourceWithHiveMetastoreCatalogSuite
             |AS SELECT 1 AS d1, "val_1" AS d2
           """.stripMargin)
 
-      val hiveTable =
-        sessionState.catalog.getTable(TableIdentifier("t", Some("default")))
+      val hiveTable = sessionState.catalog.getTable(TableIdentifier("t", Some("default")))
       assert(hiveTable.storage.inputFormat === Some(inputFormat))
       assert(hiveTable.storage.outputFormat === Some(outputFormat))
       assert(hiveTable.storage.serde === Some(serde))

@@ -31,6 +31,7 @@ class ListTablesSuite extends QueryTest with TestHiveSingleton with BeforeAndAfterAll {
   val df = sparkContext.parallelize((1 to 10).map(i => (i, s"str$i"))).toDF("key", "value")
 
   override def beforeAll(): Unit = {
+    super.beforeAll()
     // The catalog in HiveContext is a case insensitive one.
     sessionState.catalog.createTempTable(
       "ListTablesSuiteTable", df.logicalPlan, ignoreIfExists = true)
@@ -40,11 +41,15 @@ class ListTablesSuite extends QueryTest with TestHiveSingleton with BeforeAndAfterAll {
   }
 
   override def afterAll(): Unit = {
-    sessionState.catalog.dropTable(
-      TableIdentifier("ListTablesSuiteTable"), ignoreIfNotExists = true)
-    sql("DROP TABLE IF EXISTS HiveListTablesSuiteTable")
-    sql("DROP TABLE IF EXISTS ListTablesSuiteDB.HiveInDBListTablesSuiteTable")
-    sql("DROP DATABASE IF EXISTS ListTablesSuiteDB")
+    try {
+      sessionState.catalog.dropTable(
+        TableIdentifier("ListTablesSuiteTable"), ignoreIfNotExists = true)
+      sql("DROP TABLE IF EXISTS HiveListTablesSuiteTable")
+      sql("DROP TABLE IF EXISTS ListTablesSuiteDB.HiveInDBListTablesSuiteTable")
+      sql("DROP DATABASE IF EXISTS ListTablesSuiteDB")
+    } finally {
+      super.afterAll()
+    }
   }
 
   test("get all tables of current database") {

@@ -27,6 +27,7 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
   import testImplicits._
 
   protected override def beforeAll(): Unit = {
+    super.beforeAll()
     sql("DROP TABLE IF EXISTS parquet_t0")
     sql("DROP TABLE IF EXISTS parquet_t1")
     sql("DROP TABLE IF EXISTS parquet_t2")
@@ -64,11 +65,15 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
   }
 
   override protected def afterAll(): Unit = {
-    sql("DROP TABLE IF EXISTS parquet_t0")
-    sql("DROP TABLE IF EXISTS parquet_t1")
-    sql("DROP TABLE IF EXISTS parquet_t2")
-    sql("DROP TABLE IF EXISTS parquet_t3")
-    sql("DROP TABLE IF EXISTS t0")
+    try {
+      sql("DROP TABLE IF EXISTS parquet_t0")
+      sql("DROP TABLE IF EXISTS parquet_t1")
+      sql("DROP TABLE IF EXISTS parquet_t2")
+      sql("DROP TABLE IF EXISTS parquet_t3")
+      sql("DROP TABLE IF EXISTS t0")
+    } finally {
+      super.afterAll()
+    }
   }
 
   private def checkHiveQl(hiveQl: String): Unit = {

@@ -44,6 +44,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   var jsonFilePath: String = _
 
   override def beforeAll(): Unit = {
+    super.beforeAll()
     jsonFilePath = Utils.getSparkClassLoader.getResource("sample.json").getFile
   }
 

@@ -107,7 +107,7 @@ class VersionsSuite extends SparkFunSuite with Logging {
   // connecting to an auto-populated, in-process metastore. Let's make sure we are getting the
   // versions right by forcing a known compatibility failure.
   // TODO: currently only works on mysql where we manually create the schema...
-  ignore("failure sanity check") {
+  test("failure sanity check") {
     val e = intercept[Throwable] {
       val badClient = quietly {
         IsolatedClientLoader.forVersion(

@@ -189,10 +189,14 @@ abstract class AggregationQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   }
 
   override def afterAll(): Unit = {
-    sqlContext.sql("DROP TABLE IF EXISTS agg1")
-    sqlContext.sql("DROP TABLE IF EXISTS agg2")
-    sqlContext.sql("DROP TABLE IF EXISTS agg3")
-    sqlContext.dropTempTable("emptyTable")
+    try {
+      sqlContext.sql("DROP TABLE IF EXISTS agg1")
+      sqlContext.sql("DROP TABLE IF EXISTS agg2")
+      sqlContext.sql("DROP TABLE IF EXISTS agg3")
+      sqlContext.dropTempTable("emptyTable")
+    } finally {
+      super.afterAll()
+    }
   }
 
   test("group by function") {

@@ -47,7 +47,7 @@ case class ListStringCaseClass(l: Seq[String])
  */
 class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
 
-  import hiveContext.{udf, sql}
+  import hiveContext.udf
   import hiveContext.implicits._
 
   test("spark sql udf test that returns a struct") {

@@ -28,6 +28,7 @@ import org.apache.spark.sql.test.SQLTestUtils
 class WindowQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
 
   override def beforeAll(): Unit = {
+    super.beforeAll()
     sql("DROP TABLE IF EXISTS part")
     sql(
       """
@@ -50,7 +51,11 @@ class WindowQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   }
 
   override def afterAll(): Unit = {
-    sql("DROP TABLE IF EXISTS part")
+    try {
+      sql("DROP TABLE IF EXISTS part")
+    } finally {
+      super.afterAll()
+    }
   }
 
   test("windowing.q -- 15. testExpressions") {

@@ -68,8 +68,12 @@ abstract class OrcSuite extends QueryTest with TestHiveSingleton with BeforeAndAfterAll {
   }
 
   override def afterAll(): Unit = {
-    orcTableDir.delete()
-    orcTableAsDir.delete()
+    try {
+      orcTableDir.delete()
+      orcTableAsDir.delete()
+    } finally {
+      super.afterAll()
+    }
   }
 
   test("create temporary orc table") {

@@ -700,6 +700,7 @@ abstract class ParquetPartitioningTest extends QueryTest with SQLTestUtils with TestHiveSingleton {
   var partitionedTableDirWithKeyAndComplexTypes: File = null
 
   override def beforeAll(): Unit = {
+    super.beforeAll()
     partitionedTableDir = Utils.createTempDir()
     normalTableDir = Utils.createTempDir()
 
