Skip to content

Commit

Permalink
[SPARK-23000] Use fully qualified table names in HiveMetastoreCatalogSuite

Browse files Browse the repository at this point in the history

## What changes were proposed in this pull request?

In another attempt to fix DataSourceWithHiveMetastoreCatalogSuite, this patch uses qualified table names (`default.t`) in the individual tests.

## How was this patch tested?

N/A (Test Only Change)

Author: Sameer Agarwal <[email protected]>

Closes #20273 from sameeragarwal/flaky-test.

(cherry picked from commit c7572b7)
Signed-off-by: gatorsmile <[email protected]>
  • Loading branch information
sameeragarwal authored and gatorsmile committed Jan 16, 2018
1 parent bb8e5ad commit e2ffb97
Showing 1 changed file with 14 additions and 12 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -166,13 +166,13 @@ class DataSourceWithHiveMetastoreCatalogSuite
))
).foreach { case (provider, (inputFormat, outputFormat, serde)) =>
test(s"Persist non-partitioned $provider relation into metastore as managed table") {
withTable("t") {
withTable("default.t") {
withSQLConf(SQLConf.PARQUET_WRITE_LEGACY_FORMAT.key -> "true") {
testDF
.write
.mode(SaveMode.Overwrite)
.format(provider)
.saveAsTable("t")
.saveAsTable("default.t")
}

val hiveTable = sessionState.catalog.getTableMetadata(TableIdentifier("t", Some("default")))
Expand All @@ -187,14 +187,15 @@ class DataSourceWithHiveMetastoreCatalogSuite
assert(columns.map(_.name) === Seq("d1", "d2"))
assert(columns.map(_.dataType) === Seq(DecimalType(10, 3), StringType))

checkAnswer(table("t"), testDF)
assert(sparkSession.metadataHive.runSqlHive("SELECT * FROM t") === Seq("1.1\t1", "2.1\t2"))
checkAnswer(table("default.t"), testDF)
assert(sparkSession.metadataHive.runSqlHive("SELECT * FROM default.t") ===
Seq("1.1\t1", "2.1\t2"))
}
}

test(s"Persist non-partitioned $provider relation into metastore as external table") {
withTempPath { dir =>
withTable("t") {
withTable("default.t") {
val path = dir.getCanonicalFile

withSQLConf(SQLConf.PARQUET_WRITE_LEGACY_FORMAT.key -> "true") {
Expand All @@ -203,7 +204,7 @@ class DataSourceWithHiveMetastoreCatalogSuite
.mode(SaveMode.Overwrite)
.format(provider)
.option("path", path.toString)
.saveAsTable("t")
.saveAsTable("default.t")
}

val hiveTable =
Expand All @@ -219,18 +220,18 @@ class DataSourceWithHiveMetastoreCatalogSuite
assert(columns.map(_.name) === Seq("d1", "d2"))
assert(columns.map(_.dataType) === Seq(DecimalType(10, 3), StringType))

checkAnswer(table("t"), testDF)
assert(sparkSession.metadataHive.runSqlHive("SELECT * FROM t") ===
checkAnswer(table("default.t"), testDF)
assert(sparkSession.metadataHive.runSqlHive("SELECT * FROM default.t") ===
Seq("1.1\t1", "2.1\t2"))
}
}
}

test(s"Persist non-partitioned $provider relation into metastore as managed table using CTAS") {
withTempPath { dir =>
withTable("t") {
withTable("default.t") {
sql(
s"""CREATE TABLE t USING $provider
s"""CREATE TABLE default.t USING $provider
|OPTIONS (path '${dir.toURI}')
|AS SELECT 1 AS d1, "val_1" AS d2
""".stripMargin)
Expand All @@ -248,8 +249,9 @@ class DataSourceWithHiveMetastoreCatalogSuite
assert(columns.map(_.name) === Seq("d1", "d2"))
assert(columns.map(_.dataType) === Seq(IntegerType, StringType))

checkAnswer(table("t"), Row(1, "val_1"))
assert(sparkSession.metadataHive.runSqlHive("SELECT * FROM t") === Seq("1\tval_1"))
checkAnswer(table("default.t"), Row(1, "val_1"))
assert(sparkSession.metadataHive.runSqlHive("SELECT * FROM default.t") ===
Seq("1\tval_1"))
}
}
}
Expand Down

0 comments on commit e2ffb97

Please sign in to comment.