[SPARK-22972] Couldn't find corresponding Hive SerDe for data source provider org.apache.spark.sql.hive.orc

## What changes were proposed in this pull request?

Fix the warning: Couldn't find corresponding Hive SerDe for data source provider org.apache.spark.sql.hive.orc.
This PR targets branch-2.2 and is a cherry-pick of apache@8032cf8.

The original PR is apache#20165.
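For context, the warning was raised when a table was created with the fully-qualified Hive ORC provider name. A minimal reproduction sketch (the table name and path are illustrative, and a SparkSession named `spark` is assumed):

```scala
// Hypothetical repro: before this patch, persisting a table with this provider
// logged "Couldn't find corresponding Hive SerDe for data source provider
// org.apache.spark.sql.hive.orc", since HiveSerDe.sourceToSerDe had no mapping
// for the fully-qualified class name.
spark.sql(
  """
    |CREATE TABLE hive_orc_repro
    |USING org.apache.spark.sql.hive.orc
    |OPTIONS (PATH '/tmp/orc_data')
  """.stripMargin)
```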

## How was this patch tested?

See the new test("SPARK-22972: hive orc source") added to OrcSourceSuite in this patch.

Author: xubo245 <[email protected]>

Closes apache#20195 from xubo245/HiveSerDeForBranch2.2.
xubo245 authored and MatthewRBruce committed Jul 31, 2018
1 parent bb9c174 commit ef32257
Showing 2 changed files with 32 additions and 1 deletion.
@@ -73,6 +73,7 @@ object HiveSerDe {
     val key = source.toLowerCase(Locale.ROOT) match {
       case s if s.startsWith("org.apache.spark.sql.parquet") => "parquet"
       case s if s.startsWith("org.apache.spark.sql.orc") => "orc"
+      case s if s.startsWith("org.apache.spark.sql.hive.orc") => "orc"
       case s if s.equals("orcfile") => "orc"
       case s if s.equals("parquetfile") => "parquet"
       case s if s.equals("avrofile") => "avro"
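As a quick sanity check of the new case, a sketch (not part of the diff, and assuming the HiveSerDe case class exposes a `serde` field as in Spark 2.x):

```scala
import org.apache.spark.sql.internal.HiveSerDe

// Both provider spellings should now resolve to the same ORC SerDe descriptor,
// instead of the fully-qualified Hive ORC name falling through to None.
val resolved = HiveSerDe.sourceToSerDe("org.apache.spark.sql.hive.orc")
assert(resolved == HiveSerDe.sourceToSerDe("orc"))
assert(resolved.flatMap(_.serde) ==
  Some("org.apache.hadoop.hive.ql.io.orc.OrcSerde"))
```

The new test further down exercises the same mapping through both HiveSerDe.sourceToSerDe and the table metadata recorded in the catalog.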
@@ -22,9 +22,12 @@ import java.io.File
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.spark.sql.{QueryTest, Row}
+import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.hive.HiveExternalCatalog
 import org.apache.spark.sql.hive.test.TestHiveSingleton
+import org.apache.spark.sql.internal.HiveSerDe
 import org.apache.spark.sql.sources._
+import org.apache.spark.sql.test.SQLTestUtils
 import org.apache.spark.sql.types._
 import org.apache.spark.util.Utils

@@ -197,7 +200,7 @@ abstract class OrcSuite extends QueryTest with TestHiveSingleton with BeforeAndAfterAll
   }
 }
 
-class OrcSourceSuite extends OrcSuite {
+class OrcSourceSuite extends OrcSuite with SQLTestUtils {
   override def beforeAll(): Unit = {
     super.beforeAll()
 
@@ -250,4 +253,31 @@ class OrcSourceSuite extends OrcSuite {
       )).get.toString
     }
   }
+
+  test("SPARK-22972: hive orc source") {
+    val tableName = "normal_orc_as_source_hive"
+    withTable(tableName) {
+      spark.sql(
+        s"""
+          |CREATE TABLE $tableName
+          |USING org.apache.spark.sql.hive.orc
+          |OPTIONS (
+          |  PATH '${new File(orcTableAsDir.getAbsolutePath).toURI}'
+          |)
+        """.stripMargin)
+
+      val tableMetadata = spark.sessionState.catalog.getTableMetadata(
+        TableIdentifier(tableName))
+      assert(tableMetadata.storage.inputFormat ==
+        Option("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"))
+      assert(tableMetadata.storage.outputFormat ==
+        Option("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"))
+      assert(tableMetadata.storage.serde ==
+        Option("org.apache.hadoop.hive.ql.io.orc.OrcSerde"))
+      assert(HiveSerDe.sourceToSerDe("org.apache.spark.sql.hive.orc")
+        .equals(HiveSerDe.sourceToSerDe("orc")))
+      assert(HiveSerDe.sourceToSerDe("org.apache.spark.sql.orc")
+        .equals(HiveSerDe.sourceToSerDe("orc")))
+    }
+  }
 }
