Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[SPARK-20754][SQL] Support TRUNC (number) #18106

Closed
wants to merge 19 commits into from
Closed
Show file tree
Hide file tree
Changes from 9 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 11 additions & 3 deletions python/pyspark/sql/functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -1028,9 +1028,10 @@ def to_timestamp(col, format=None):


@since(1.5)
def trunc(date, format):
def trunc(data, format):
"""
Returns date truncated to the unit specified by the format.
Returns date truncated to the unit specified by the format, or
number truncated to the specified number of decimal places.
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

In the latter case, where `data` is a number, calling the 2nd parameter `format` seems a bit out of place.


:param format: 'year', 'YYYY', 'yy' or 'month', 'mon', 'mm' to truncate a date,
    or an int number of decimal places to truncate a number
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

And we need to update this param doc.


Expand All @@ -1039,9 +1040,16 @@ def trunc(date, format):
[Row(year=datetime.date(1997, 1, 1))]
>>> df.select(trunc(df.d, 'mon').alias('month')).collect()
[Row(month=datetime.date(1997, 2, 1))]
>>> df = spark.createDataFrame([(1234567891.1234567891,)], ['d'])
>>> df.select(trunc(df.d, 4).alias('positive')).collect()
[Row(positive=1234567891.1234)]
>>> df.select(trunc(df.d, -4).alias('negative')).collect()
[Row(negative=1234560000.0)]
>>> df.select(trunc(df.d, 0).alias('zero')).collect()
[Row(zero=1234567891.0)]
"""
sc = SparkContext._active_spark_context
return Column(sc._jvm.functions.trunc(_to_java_column(date), format))
return Column(sc._jvm.functions.trunc(_to_java_column(data), format))


@since(1.5)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -389,7 +389,6 @@ object FunctionRegistry {
expression[ParseToDate]("to_date"),
expression[ToUnixTimestamp]("to_unix_timestamp"),
expression[ToUTCTimestamp]("to_utc_timestamp"),
expression[TruncDate]("trunc"),
expression[UnixTimestamp]("unix_timestamp"),
expression[DayOfWeek]("dayofweek"),
expression[WeekOfYear]("weekofyear"),
Expand Down Expand Up @@ -424,6 +423,7 @@ object FunctionRegistry {
expression[CurrentDatabase]("current_database"),
expression[CallMethodViaReflection]("reflect"),
expression[CallMethodViaReflection]("java_method"),
expression[Trunc]("trunc"),

// grouping sets
expression[Cube]("cube"),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1267,86 +1267,6 @@ case class ParseToTimestamp(left: Expression, format: Option[Expression], child:
override def dataType: DataType = TimestampType
}

/**
 * Returns date truncated to the unit specified by the format.
 *
 * Evaluates to null when the format is unknown or either input is null.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(date, fmt) - Returns `date` with the time portion of the day truncated to the unit specified by the format model `fmt`.",
extended = """
Examples:
> SELECT _FUNC_('2009-02-12', 'MM');
2009-02-01
> SELECT _FUNC_('2015-10-27', 'YEAR');
2015-01-01
""")
// scalastyle:on line.size.limit
case class TruncDate(date: Expression, format: Expression)
extends BinaryExpression with ImplicitCastInputTypes {
override def left: Expression = date
override def right: Expression = format

override def inputTypes: Seq[AbstractDataType] = Seq(DateType, StringType)
override def dataType: DataType = DateType
// Nullable because an unknown format string or a null child evaluates to null.
override def nullable: Boolean = true
override def prettyName: String = "trunc"

// Parsed once and cached; only valid when `format` is foldable. -1 means unknown format.
private lazy val truncLevel: Int =
DateTimeUtils.parseTruncLevel(format.eval().asInstanceOf[UTF8String])

override def eval(input: InternalRow): Any = {
// Re-parse the level per row only when the format expression is not a constant.
val level = if (format.foldable) {
truncLevel
} else {
DateTimeUtils.parseTruncLevel(format.eval().asInstanceOf[UTF8String])
}
if (level == -1) {
// unknown format
null
} else {
val d = date.eval(input)
if (d == null) {
null
} else {
// DateType values are stored internally as Int days since epoch.
DateTimeUtils.truncDate(d.asInstanceOf[Int], level)
}
}
}

override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")

if (format.foldable) {
if (truncLevel == -1) {
// Constant but unknown format: the whole expression is statically null.
ev.copy(code = s"""
boolean ${ev.isNull} = true;
${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};""")
} else {
// Constant known format: inline the pre-parsed trunc level into the generated code.
val d = date.genCode(ctx)
ev.copy(code = s"""
${d.code}
boolean ${ev.isNull} = ${d.isNull};
${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${ev.isNull}) {
${ev.value} = $dtu.truncDate(${d.value}, $truncLevel);
}""")
}
} else {
// Non-constant format: generated code parses the trunc level for every row.
nullSafeCodeGen(ctx, ev, (dateVal, fmt) => {
val form = ctx.freshName("form")
s"""
int $form = $dtu.parseTruncLevel($fmt);
if ($form == -1) {
${ev.isNull} = true;
} else {
${ev.value} = $dtu.truncDate($dateVal, $form);
}
"""
})
}
}
}

/**
* Returns the number of days from startDate to endDate.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ import java.util.UUID

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.util.{BigDecimalUtils, DateTimeUtils}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String

Expand Down Expand Up @@ -132,3 +133,141 @@ case class Uuid() extends LeafExpression {
s"UTF8String.fromString(java.util.UUID.randomUUID().toString());", isNull = "false")
}
}

/**
 * Returns a date truncated to the unit specified by the format, or
 * a number truncated to the specified number of decimal places.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = """
_FUNC_(data[, fmt]) - Returns `data` truncated by the format model `fmt`.
Copy link
Member

@viirya viirya Jun 23, 2017

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

fmt -> expr or trunc_expr?

If `data` is DateType, returns `data` with the time portion of the day truncated to the unit specified by the format model `fmt`.
If `data` is DecimalType/DoubleType, returns `data` truncated to `fmt` decimal places.
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please also describe default values (MM or 0).

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Also, I would describe the types without class names (e.g., date).

""",
extended = """
Examples:
> SELECT _FUNC_('2009-02-12', 'MM');
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

As it doesn't extends ImplicitCastInputTypes anymore, I think you can't directly use string as date parameter?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

And I don't think we should drop this support.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I guess this also drops the support of other types (e.g., timestamp) basically as we don't allow implicit cast (e.g, SELECT trunc(timestamp('2009-02-12'), 'MM'))

Copy link
Member Author

@wangyum wangyum Jun 26, 2017

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes, this is what I worry about. Because StringType can be implicitly cast to both date and double, I plan to change it like this:

  override def inputTypes: Seq[AbstractDataType] = dataType match {
    case NullType => Seq(dataType, TypeCollection(StringType, IntegerType))
    case DateType | StringType | TimestampType => Seq(DateType, StringType)
    case DoubleType | DecimalType.Fixed(_, _) => Seq(dataType, IntegerType)
    case _ => Seq(TypeCollection(DateType, DoubleType, DecimalType),
      TypeCollection(StringType, IntegerType))
  }

2009-02-01
> SELECT _FUNC_('2015-10-27', 'YEAR');
2015-01-01
> SELECT _FUNC_('1989-03-13');
1989-03-01
> SELECT _FUNC_(1234567891.1234567891, 4);
1234567891.1234
> SELECT _FUNC_(1234567891.1234567891, -4);
1234560000
> SELECT _FUNC_(1234567891.1234567891);
1234567891
""")
// scalastyle:on line.size.limit
case class Trunc(data: Expression, format: Expression)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

truncParam?

extends BinaryExpression with ImplicitCastInputTypes {

def this(data: Expression) = {
this(data, Literal(if (data.dataType.isInstanceOf[DateType]) "MM" else 0))
}

override def left: Expression = data
override def right: Expression = format

val isTruncNumber = format.dataType.isInstanceOf[IntegerType]

override def dataType: DataType = data.dataType

override def inputTypes: Seq[AbstractDataType] =
Seq(TypeCollection(DateType, DoubleType, DecimalType),
TypeCollection(StringType, IntegerType))
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think this might lead to wrong input types combinations such as (DoubleType, StringType) and (DateType, IntegerType)?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If we are going to have only trunc for truncating number and datetime. We should prevent wrong input types.


override def nullable: Boolean = true

override def prettyName: String = "trunc"

private lazy val truncFormat: Int = if (isTruncNumber) {
format.eval().asInstanceOf[Int]
} else {
DateTimeUtils.parseTruncLevel(format.eval().asInstanceOf[UTF8String])
}

override def eval(input: InternalRow): Any = {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

override nullSafeEval?

val d = data.eval(input)
val form = format.eval()
if (null == d || null == form) {
null
} else {
if (isTruncNumber) {
val scale = if (format.foldable) truncFormat else format.eval().asInstanceOf[Int]
data.dataType match {
case DoubleType => BigDecimalUtils.trunc(d.asInstanceOf[Double], scale)
case DecimalType.Fixed(_, _) =>
BigDecimalUtils.trunc(d.asInstanceOf[Decimal].toJavaBigDecimal, scale)
}
} else {
val level = if (format.foldable) {
truncFormat
} else {
DateTimeUtils.parseTruncLevel(format.eval().asInstanceOf[UTF8String])
}
if (level == -1) {
// unknown format
null
} else {
DateTimeUtils.truncDate(d.asInstanceOf[Int], level)
}
}
}
}

// Generates Java code for either numeric truncation (BigDecimalUtils.trunc) or
// date truncation (DateTimeUtils.truncDate), chosen by the format's data type.
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {

if (isTruncNumber) {
// Numeric path: format is an integer scale.
val bdu = BigDecimalUtils.getClass.getName.stripSuffix("$")

if (format.foldable) {
// Constant scale: inline the pre-evaluated `truncFormat` into the generated code.
// NOTE(review): for DecimalType input, ${d.value} is a Spark `Decimal`, while
// $bdu.trunc expects a double or java.math.BigDecimal — this likely needs a
// toJavaBigDecimal() conversion (as eval() does) — confirm the generated code compiles.
val d = data.genCode(ctx)
ev.copy(code = s"""
${d.code}
boolean ${ev.isNull} = ${d.isNull};
${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${ev.isNull}) {
${ev.value} = $bdu.trunc(${d.value}, $truncFormat);
}""")
} else {
// Non-constant scale: read it from the generated child expression per row.
nullSafeCodeGen(ctx, ev, (doubleVal, fmt) => {
s"${ev.value} = $bdu.trunc($doubleVal, $fmt);"
})
}
} else {
// Date path: format is a string trunc level; mirrors the original TruncDate codegen.
val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")

if (format.foldable) {
if (truncFormat == -1) {
// Constant but unknown format: the whole expression is statically null.
ev.copy(code = s"""
boolean ${ev.isNull} = true;
${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};""")
} else {
// Constant known format: inline the pre-parsed trunc level.
val d = data.genCode(ctx)
ev.copy(code = s"""
${d.code}
boolean ${ev.isNull} = ${d.isNull};
${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${ev.isNull}) {
${ev.value} = $dtu.truncDate(${d.value}, $truncFormat);
}""")
}
} else {
// Non-constant format: generated code parses the trunc level for every row.
nullSafeCodeGen(ctx, ev, (dateVal, fmt) => {
val form = ctx.freshName("form")
s"""
int $form = $dtu.parseTruncLevel($fmt);
if ($form == -1) {
${ev.isNull} = true;
} else {
${ev.value} = $dtu.truncDate($dateVal, $form);
}
"""
})
}
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.catalyst.util

import java.math.{BigDecimal => JBigDecimal}

/**
* Helper functions for BigDecimal.
*/
/**
 * Helper functions for BigDecimal.
 */
object BigDecimalUtils {

  /**
   * Returns the double `input` truncated (toward zero) to `scale` decimal places.
   *
   * @param input the value to truncate
   * @param scale number of decimal places to keep; a negative scale truncates
   *              digits to the left of the decimal point
   */
  def trunc(input: Double, scale: Int): Double = {
    trunc(JBigDecimal.valueOf(input), scale).doubleValue()
  }

  /**
   * Returns `input` truncated (toward zero) to `scale` decimal places.
   *
   * Examples: trunc(1234567891.1234567891, 4) == 1234567891.1234,
   * trunc(1234567891.1234567891, -4) == 1234560000.
   *
   * Uses setScale with RoundingMode.DOWN, which truncates exactly. This avoids
   * two defects of the previous Math.pow-based approach (copied from Hive):
   *  - Math.pow(10, scale) is a lossy Double, inexact once |scale| > 15
   *  - multiply(pow).longValue() silently overflowed for large inputs
   */
  def trunc(input: JBigDecimal, scale: Int): JBigDecimal = {
    val truncated = input.setScale(scale, RoundingMode.DOWN)
    if (scale < 0) {
      // Normalize negative-scale results to scale 0 (exact: only adds zeros),
      // so e.g. the result prints as 1234560000 rather than 1.23456E+9.
      truncated.setScale(0)
    } else {
      truncated
    }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -527,27 +527,6 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
NextDay(Literal(Date.valueOf("2015-07-23")), Literal.create(null, StringType)), null)
}

test("function trunc") {
  // Checks TruncDate with both a foldable and a non-foldable format literal.
  def assertTrunc(input: Date, fmt: String, expected: Date): Unit = {
    val dateLit = Literal.create(input, DateType)
    checkEvaluation(TruncDate(dateLit, Literal.create(fmt, StringType)), expected)
    checkEvaluation(TruncDate(dateLit, NonFoldableLiteral.create(fmt, StringType)), expected)
  }
  val date = Date.valueOf("2015-07-22")
  // All accepted spellings of the year and month trunc levels.
  val expectedByFormat = Seq(
    Seq("yyyy", "YYYY", "year", "YEAR", "yy", "YY") -> Date.valueOf("2015-01-01"),
    Seq("month", "MONTH", "mon", "MON", "mm", "MM") -> Date.valueOf("2015-07-01"))
  for ((formats, expected) <- expectedByFormat; fmt <- formats) {
    assertTrunc(date, fmt, expected)
  }
  // Unsupported format strings and null inputs all evaluate to null.
  assertTrunc(date, "DD", null)
  assertTrunc(date, null, null)
  assertTrunc(null, "MON", null)
  assertTrunc(null, null, null)
}

test("from_unixtime") {
val sdf1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US)
val fmt2 = "yyyy-MM-dd HH:mm:ss.SSS"
Expand Down
Loading