From 9c0e4e18bf3a05ac925429d46339a871a24659ac Mon Sep 17 00:00:00 2001 From: Josh Rosen Date: Sun, 24 May 2015 18:36:04 -0700 Subject: [PATCH] Remove last use of convertToScala(). --- .../spark/sql/catalyst/CatalystTypeConverters.scala | 9 --------- .../spark/sql/catalyst/expressions/generators.scala | 10 +++++++--- 2 files changed, 7 insertions(+), 12 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala index 586d8167b9272..b838f92255554 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala @@ -353,15 +353,6 @@ object CatalystTypeConverters { case other => other } - /** - * Converts Catalyst types used internally in rows to standard Scala types - * This method is slow, and for batch conversion you should be using converter - * produced by createToScalaConverter. - */ - def convertToScala(catalystValue: Any, dataType: DataType): Any = { - getConverterForType(dataType).toScala(catalystValue) - } - /** * Creates a converter function that will convert Catalyst types to Scala type. * Typical use case would be converting a collection of rows that have the same schema. 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala index 634138010fd21..c8f9aacc0baad 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala @@ -71,12 +71,16 @@ case class UserDefinedGenerator( children: Seq[Expression]) extends Generator { + private[this] val inputRow: InterpretedProjection = new InterpretedProjection(children) + private[this] val convertToScala: (Row) => Row = { + val inputSchema = StructType(children.map(e => StructField(e.simpleString, e.dataType, true))) + CatalystTypeConverters.createToScalaConverter(inputSchema) + }.asInstanceOf[(Row => Row)] + override def eval(input: Row): TraversableOnce[Row] = { // TODO(davies): improve this // Convert the objects into Scala Type before calling function, we need schema to support UDT - val inputSchema = StructType(children.map(e => StructField(e.simpleString, e.dataType, true))) - val inputRow = new InterpretedProjection(children) - function(CatalystTypeConverters.convertToScala(inputRow(input), inputSchema).asInstanceOf[Row]) + function(convertToScala(inputRow(input))) } override def toString: String = s"UserDefinedGenerator(${children.mkString(",")})"