
Commit

remove cleanIdentifier
scwf committed Jan 9, 2015
1 parent f336a16 commit a852b10
Showing 1 changed file with 2 additions and 9 deletions: sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
@@ -121,7 +121,7 @@ private[sql] class DDLParser extends StandardTokenParsers with PackratParsers wi

   protected lazy val column: Parser[StructField] =
     ident ~ dataType ^^ { case columnName ~ typ =>
-      StructField(cleanIdentifier(columnName), typ)
+      StructField(columnName, typ)
     }
 
   protected lazy val primitiveType: Parser[DataType] =
@@ -157,7 +157,7 @@ private[sql] class DDLParser extends StandardTokenParsers with PackratParsers wi

   protected lazy val structField: Parser[StructField] =
     ident ~ ":" ~ dataType ^^ {
-      case fieldName ~ _ ~ tpe => StructField(cleanIdentifier(fieldName), tpe, nullable = true)
+      case fieldName ~ _ ~ tpe => StructField(fieldName, tpe, nullable = true)
     }
 
   protected lazy val structType: Parser[DataType] =
@@ -173,13 +173,6 @@ private[sql] class DDLParser extends StandardTokenParsers with PackratParsers wi
       mapType |
       structType |
       primitiveType
-
-  protected val escapedIdentifier = "`([^`]+)`".r
-  /** Strips backticks from ident if present */
-  protected def cleanIdentifier(ident: String): String = ident match {
-    case escapedIdentifier(i) => i
-    case plainIdent => plainIdent
-  }
 }
 
 private[sql] case class CreateTableUsing(
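For reference, the helper deleted above stripped backticks from identifiers before they were wrapped in a StructField. Below is a minimal standalone sketch of that behavior: the regex and the cleanIdentifier body are taken verbatim from the removed lines, while the enclosing object and main method are illustrative only.

// Minimal standalone sketch of the helper removed in this commit.
// The regex and cleanIdentifier are copied from the deleted lines above;
// the enclosing object and main method are illustrative, not part of Spark.
object CleanIdentifierSketch {

  // Matches an identifier wrapped in backticks, e.g. `order id`.
  private val escapedIdentifier = "`([^`]+)`".r

  // Strips backticks from ident if present, otherwise returns it unchanged.
  def cleanIdentifier(ident: String): String = ident match {
    case escapedIdentifier(i) => i
    case plainIdent => plainIdent
  }

  def main(args: Array[String]): Unit = {
    println(cleanIdentifier("`order id`")) // prints: order id
    println(cleanIdentifier("price"))      // prints: price
    // After this commit the DDL parser builds StructField from the raw
    // identifier instead of calling cleanIdentifier first.
  }
}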
