From 83b6fc331f471205f7f6a72c4c408f84a944108e Mon Sep 17 00:00:00 2001
From: scwf
Date: Wed, 31 Dec 2014 00:14:31 +0800
Subject: [PATCH] minor fix

---
 .../apache/spark/sql/json/JSONRelation.scala  |  3 +--
 .../org/apache/spark/sql/sources/ddl.scala    | 21 ++++++++++++++-----
 .../apache/spark/sql/sources/interfaces.scala |  4 +---
 .../org/apache/spark/sql/QueryTest.scala      |  2 +-
 4 files changed, 19 insertions(+), 11 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/json/JSONRelation.scala b/sql/core/src/main/scala/org/apache/spark/sql/json/JSONRelation.scala
index 8f835b8517e61..ab013885b7086 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/json/JSONRelation.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/json/JSONRelation.scala
@@ -43,8 +43,7 @@ private[sql] case class JSONRelation(
 
   private def baseRDD = sqlContext.sparkContext.textFile(fileName)
 
-  override val schema =
-    userSpecifiedSchema.getOrElse(
+  override val schema = userSpecifiedSchema.getOrElse(
     JsonRDD.inferSchema(
       baseRDD,
       samplingRatio,
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
index 12c66e24115ad..42f5d81933593 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
@@ -100,9 +100,15 @@ private[sql] class DDLParser extends StandardTokenParsers with PackratParsers wi
   protected lazy val pair: Parser[(String, String)] = ident ~ stringLit ^^ { case k ~ v => (k,v) }
 
   protected lazy val column: Parser[StructField] =
-    ident ~ ident ^^ { case name ~ typ =>
+    ( ident ~ ident ^^ { case name ~ typ =>
       StructField(name, metastoreTypes.toDataType(typ))
     }
+    |
+    ident ~ ("decimal" ~ "(" ~> numericLit) ~ ("," ~> numericLit <~ ")") ^^ {
+      case name ~ precision ~ scale =>
+        StructField(name, DecimalType(precision.toInt, scale.toInt))
+    }
+    )
 }
 
 /**
@@ -121,8 +127,8 @@ private[sql] class MetastoreTypes extends RegexParsers {
     "bigint" ^^^ LongType |
     "binary" ^^^ BinaryType |
     "boolean" ^^^ BooleanType |
-    fixedDecimalType |                     // Hive 0.13+ decimal with precision/scale
-    "decimal" ^^^ DecimalType.Unlimited |  // Hive 0.12 decimal with no precision/scale
+    fixedDecimalType |                     // decimal with precision/scale
+    "decimal" ^^^ DecimalType.Unlimited |  // decimal with no precision/scale
     "date" ^^^ DateType |
     "timestamp" ^^^ TimestampType |
     "varchar\\((\\d+)\\)".r ^^^ StringType
@@ -204,8 +210,13 @@ private[sql] case class CreateTableUsing(
       }
     }
     val dataSource =
       clazz.newInstance().asInstanceOf[org.apache.spark.sql.sources.SchemaRelationProvider]
-    val relation = dataSource.createRelation(
-      sqlContext, new CaseInsensitiveMap(options), Some(StructType(tableCols)))
+    val relation = if (tableCols.isEmpty) {
+      dataSource.createRelation(
+        sqlContext, new CaseInsensitiveMap(options))
+    } else {
+      dataSource.createRelation(
+        sqlContext, new CaseInsensitiveMap(options), Some(StructType(tableCols)))
+    }
 
     sqlContext.baseRelationToSchemaRDD(relation).registerTempTable(tableName)
     Seq.empty
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala b/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala
index 3f53dbc952c81..6ae6dc9395062 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala
@@ -41,9 +41,7 @@ trait RelationProvider {
    * Note: the parameters' keywords are case insensitive and this insensitivity is enforced
    * by the Map that is passed to the function.
    */
-  def createRelation(
-      sqlContext: SQLContext,
-      parameters: Map[String, String]): BaseRelation
+  def createRelation(sqlContext: SQLContext, parameters: Map[String, String]): BaseRelation
 }
 
 /**
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala b/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
index a4ea95c07360e..3d9f0cbf80fe7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
@@ -70,7 +70,7 @@ class QueryTest extends PlanTest {
        """.stripMargin)
     }
 
-    if (prepareAnswer(convertedAnswer) != prepareAnswer(sparkAnswer)) { // issues here, sparkAnswer may be GenericRow[]
+    if (prepareAnswer(convertedAnswer) != prepareAnswer(sparkAnswer)) {
       fail(s"""
         |Results do not match for query:
         |${rdd.logicalPlan}
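
For reference, a minimal sketch of the DDL this patch is meant to accept; the
table name, columns, and path below are hypothetical, not part of the patch:

    // A user-specified schema may now include a fixed-precision decimal column;
    // if the column list is omitted, CreateTableUsing falls back to the
    // two-argument createRelation and the source infers the schema itself.
    sqlContext.sql(
      """
        |CREATE TEMPORARY TABLE people (name string, salary decimal(10, 2))
        |USING org.apache.spark.sql.json
        |OPTIONS (path 'people.json')
      """.stripMargin)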