add back UserDefinedFunction.inputTypes
cloud-fan committed Sep 3, 2018
1 parent 64bbd13 commit e9c6fbc
Showing 1 changed file with 9 additions and 3 deletions.
@@ -41,12 +41,18 @@ import org.apache.spark.sql.types.DataType
 case class UserDefinedFunction protected[sql] (
     f: AnyRef,
     dataType: DataType,
-    inputTypes: Option[Seq[ScalaReflection.Schema]]) {
+    inputSchemas: Option[Seq[ScalaReflection.Schema]]) {
 
   private var _nameOption: Option[String] = None
   private var _nullable: Boolean = true
   private var _deterministic: Boolean = true
 
+  // This is to keep backward compatibility for this case class.
+  // TODO: revisit this case class in Spark 3.0, and narrow down the public surface.
+  def inputTypes: Option[Seq[DataType]] = {
+    inputSchemas.map(_.map(_.dataType))
+  }
+
   /**
    * Returns true when the UDF can return a nullable value.
    *
@@ -73,11 +79,11 @@ case class UserDefinedFunction protected[sql] (
       f,
       dataType,
       exprs.map(_.expr),
-      inputTypes.map(_.map(_.dataType)).getOrElse(Nil),
+      inputSchemas.map(_.map(_.dataType)).getOrElse(Nil),
       udfName = _nameOption,
       nullable = _nullable,
       udfDeterministic = _deterministic,
-      nullableTypes = inputTypes.map(_.map(_.nullable)).getOrElse(Nil)))
+      nullableTypes = inputSchemas.map(_.map(_.nullable)).getOrElse(Nil)))
   }
 
   private def copyAll(): UserDefinedFunction = {
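The diff preserves source compatibility by re-exposing the old inputTypes accessor as a method derived from the renamed inputSchemas constructor parameter, so existing callers of inputTypes keep compiling. A minimal, self-contained sketch of the same pattern (the Schema and MyUDF names below are simplified stand-ins, not the actual Spark types):

  // Simplified stand-in for ScalaReflection.Schema (assumption; the real type
  // carries a DataType rather than a String).
  case class Schema(dataType: String, nullable: Boolean)

  // The constructor parameter is renamed to inputSchemas, while inputTypes
  // survives as a derived method for backward compatibility.
  case class MyUDF(inputSchemas: Option[Seq[Schema]]) {
    def inputTypes: Option[Seq[String]] = inputSchemas.map(_.map(_.dataType))
  }

  object InputTypesCompatExample extends App {
    val udf = MyUDF(Some(Seq(Schema("IntegerType", nullable = false))))
    println(udf.inputTypes) // Some(List(IntegerType)) -- old accessor still works
  }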
