From 6a248b4639269a01bbd1b8e47d352caa88ec057c Mon Sep 17 00:00:00 2001
From: Grigory Pomadchin
Date: Mon, 18 Oct 2021 09:34:20 -0400
Subject: [PATCH] Remove nowarn syntax since this change is not compatible with earlier Spark versions anyway

---
 .../functions/AggregateFunctions.scala      | 14 +------
 .../functions/NonAggregateFunctions.scala   | 42 ++-----------------
 .../functions/AggregateFunctionsTests.scala |  4 +-
 .../NonAggregateFunctionsTests.scala        | 18 ++++----
 4 files changed, 16 insertions(+), 62 deletions(-)

diff --git a/dataset/src/main/scala/frameless/functions/AggregateFunctions.scala b/dataset/src/main/scala/frameless/functions/AggregateFunctions.scala
index 4dae010a5..6b466b3e4 100644
--- a/dataset/src/main/scala/frameless/functions/AggregateFunctions.scala
+++ b/dataset/src/main/scala/frameless/functions/AggregateFunctions.scala
@@ -6,8 +6,6 @@ import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.{functions => sparkFunctions}
 import frameless.syntax._
 
-import scala.annotation.nowarn
-
 trait AggregateFunctions {
   /** Aggregate function: returns the number of items in a group.
    *
@@ -79,24 +77,14 @@ trait AggregateFunctions {
    *
    * apache/spark
    */
-  @deprecated("Use sum_distinct", "3.2.0")
   def sumDistinct[A, T, Out](column: TypedColumn[T, A])(
     implicit
     summable: CatalystSummable[A, Out],
     oencoder: TypedEncoder[Out],
     aencoder: TypedEncoder[A]
-  ): TypedAggregate[T, Out] = sum_distinct(column)
-
-  // supress sparkFunstion.sumDistinct call which is used to maintain Spark 3.1.x backwards compat
-  @nowarn
-  def sum_distinct[A, T, Out](column: TypedColumn[T, A])(
-    implicit
-    summable: CatalystSummable[A, Out],
-    oencoder: TypedEncoder[Out],
-    aencoder: TypedEncoder[A]
   ): TypedAggregate[T, Out] = {
     val zeroExpr = Literal.create(summable.zero, TypedEncoder[A].catalystRepr)
-    val sumExpr = expr(sparkFunctions.sumDistinct(column.untyped))
+    val sumExpr = expr(sparkFunctions.sum_distinct(column.untyped))
     val sumOrZero = Coalesce(Seq(sumExpr, zeroExpr))
 
     new TypedAggregate[T, Out](sumOrZero)
diff --git a/dataset/src/main/scala/frameless/functions/NonAggregateFunctions.scala b/dataset/src/main/scala/frameless/functions/NonAggregateFunctions.scala
index 478efdbb6..fc0597d42 100644
--- a/dataset/src/main/scala/frameless/functions/NonAggregateFunctions.scala
+++ b/dataset/src/main/scala/frameless/functions/NonAggregateFunctions.scala
@@ -3,7 +3,6 @@ package functions
 
 import org.apache.spark.sql.{Column, functions => sparkFunctions}
 
-import scala.annotation.nowarn
 import scala.util.matching.Regex
 
 trait NonAggregateFunctions {
@@ -87,61 +86,34 @@ trait NonAggregateFunctions {
    *
    * apache/spark
    */
-  @deprecated("Use shiftrightunsigned", "3.2.0")
   def shiftRightUnsigned[A, B, T](column: AbstractTypedColumn[T, A], numBits: Int)
-    (implicit
-      i0: CatalystBitShift[A, B],
-      i1: TypedEncoder[B]
-    ): column.ThisType[T, B] = shiftrightunsigned(column, numBits)
-
-  // supress sparkFunstion.shiftRightUnsigned call which is used to maintain Spark 3.1.x backwards compat
-  @nowarn
-  def shiftrightunsigned[A, B, T](column: AbstractTypedColumn[T, A], numBits: Int)
     (implicit
       i0: CatalystBitShift[A, B],
      i1: TypedEncoder[B]
     ): column.ThisType[T, B] =
-    column.typed(sparkFunctions.shiftRightUnsigned(column.untyped, numBits))
+    column.typed(sparkFunctions.shiftrightunsigned(column.untyped, numBits))
 
   /** Non-Aggregate function: shift the the given value numBits right. If given long, will return long else it will return an integer.
    *
    * apache/spark
    */
-  @deprecated("Use shiftright", "3.2.0")
   def shiftRight[A, B, T](column: AbstractTypedColumn[T, A], numBits: Int)
-    (implicit
-      i0: CatalystBitShift[A, B],
-      i1: TypedEncoder[B]
-    ): column.ThisType[T, B] = shiftright(column, numBits)
-
-  // supress sparkFunstion.shiftRight call which is used to maintain Spark 3.1.x backwards compat
-  @nowarn
-  def shiftright[A, B, T](column: AbstractTypedColumn[T, A], numBits: Int)
     (implicit
       i0: CatalystBitShift[A, B],
       i1: TypedEncoder[B]
     ): column.ThisType[T, B] =
-    column.typed(sparkFunctions.shiftRight(column.untyped, numBits))
+    column.typed(sparkFunctions.shiftright(column.untyped, numBits))
 
   /** Non-Aggregate function: shift the the given value numBits left. If given long, will return long else it will return an integer.
    *
    * apache/spark
    */
-  @deprecated("Use shiftleft", "3.2.0")
   def shiftLeft[A, B, T](column: AbstractTypedColumn[T, A], numBits: Int)
-    (implicit
-      i0: CatalystBitShift[A, B],
-      i1: TypedEncoder[B]
-    ): column.ThisType[T, B] = shiftleft(column, numBits)
-
-  // supress sparkFunstion.shiftLeft call which is used to maintain Spark 3.1.x backwards compat
-  @nowarn
-  def shiftleft[A, B, T](column: AbstractTypedColumn[T, A], numBits: Int)
     (implicit
       i0: CatalystBitShift[A, B],
       i1: TypedEncoder[B]
     ): column.ThisType[T, B] =
-    column.typed(sparkFunctions.shiftLeft(column.untyped, numBits))
+    column.typed(sparkFunctions.shiftleft(column.untyped, numBits))
 
   /** Non-Aggregate function: returns the absolute value of a numeric column
    *
@@ -519,14 +491,8 @@ trait NonAggregateFunctions {
    *
    * apache/spark
    */
-  @deprecated("Use bitwise_not", "3.2.0")
   def bitwiseNOT[A: CatalystBitwise, T](column: AbstractTypedColumn[T, A]): column.ThisType[T, A] =
-    bitwise_not(column)
-
-  // supress sparkFunstion.bitwiseNOT call which is used to maintain Spark 3.1.x backwards compat
-  @nowarn
-  def bitwise_not[A: CatalystBitwise, T](column: AbstractTypedColumn[T, A]): column.ThisType[T, A] =
-    column.typed(sparkFunctions.bitwiseNOT(column.untyped))(column.uencoder)
+    column.typed(sparkFunctions.bitwise_not(column.untyped))(column.uencoder)
 
   /** Non-Aggregate function: file name of the current Spark task. Empty string if row did not originate from
    * a file
diff --git a/dataset/src/test/scala/frameless/functions/AggregateFunctionsTests.scala b/dataset/src/test/scala/frameless/functions/AggregateFunctionsTests.scala
index 64f5b3445..8a5479eb6 100644
--- a/dataset/src/test/scala/frameless/functions/AggregateFunctionsTests.scala
+++ b/dataset/src/test/scala/frameless/functions/AggregateFunctionsTests.scala
@@ -57,7 +57,7 @@ class AggregateFunctionsTests extends TypedDatasetSuite {
     check(sparkSchema[Short, Long](sum))
   }
 
-  test("sum_distinct") {
+  test("sumDistinct") {
     case class Sum4Tests[A, B](sum: Seq[A] => B)
 
     def prop[A: TypedEncoder, Out: TypedEncoder : Numeric](xs: List[A])(
@@ -68,7 +68,7 @@ class AggregateFunctionsTests extends TypedDatasetSuite {
       val dataset = TypedDataset.create(xs.map(X1(_)))
       val A = dataset.col[A]('a)
 
-      val datasetSum: List[Out] = dataset.agg(sum_distinct(A)).collect().run().toList
+      val datasetSum: List[Out] = dataset.agg(sumDistinct(A)).collect().run().toList
 
       datasetSum match {
         case x :: Nil => approximatelyEqual(summer.sum(xs), x)
diff --git a/dataset/src/test/scala/frameless/functions/NonAggregateFunctionsTests.scala b/dataset/src/test/scala/frameless/functions/NonAggregateFunctionsTests.scala
index 8205871c2..7f8528e39 100644
--- a/dataset/src/test/scala/frameless/functions/NonAggregateFunctionsTests.scala
+++ b/dataset/src/test/scala/frameless/functions/NonAggregateFunctionsTests.scala
@@ -176,7 +176,7 @@ class NonAggregateFunctionsTests extends TypedDatasetSuite {
     res ?= resCompare
   }
 
-  test("shiftrightunsigned") {
+  test("shiftRightUnsigned") {
     val spark = session
     import spark.implicits._
 
@@ -184,7 +184,7 @@ class NonAggregateFunctionsTests extends TypedDatasetSuite {
      (values: List[X1[A]], numBits: Int)
      (implicit catalystBitShift: CatalystBitShift[A, B], encX1: Encoder[X1[A]]) = {
       val typedDS = TypedDataset.create(values)
-      propBitShift(typedDS)(shiftrightunsigned(typedDS('a), numBits), sparkFunctions.shiftrightunsigned, numBits)
+      propBitShift(typedDS)(shiftRightUnsigned(typedDS('a), numBits), sparkFunctions.shiftrightunsigned, numBits)
     }
 
     check(forAll(prop[Byte, Int] _))
@@ -194,7 +194,7 @@ class NonAggregateFunctionsTests extends TypedDatasetSuite {
     check(forAll(prop[BigDecimal, Int] _))
   }
 
-  test("shiftright") {
+  test("shiftRight") {
     val spark = session
     import spark.implicits._
 
@@ -202,7 +202,7 @@ class NonAggregateFunctionsTests extends TypedDatasetSuite {
      (values: List[X1[A]], numBits: Int)
      (implicit catalystBitShift: CatalystBitShift[A, B], encX1: Encoder[X1[A]]) = {
       val typedDS = TypedDataset.create(values)
-      propBitShift(typedDS)(shiftright(typedDS('a), numBits), sparkFunctions.shiftright, numBits)
+      propBitShift(typedDS)(shiftRight(typedDS('a), numBits), sparkFunctions.shiftright, numBits)
     }
 
     check(forAll(prop[Byte, Int] _))
@@ -212,7 +212,7 @@ class NonAggregateFunctionsTests extends TypedDatasetSuite {
     check(forAll(prop[BigDecimal, Int] _))
   }
 
-  test("shiftleft") {
+  test("shiftLeft") {
     val spark = session
     import spark.implicits._
 
@@ -220,7 +220,7 @@ class NonAggregateFunctionsTests extends TypedDatasetSuite {
      (values: List[X1[A]], numBits: Int)
      (implicit catalystBitShift: CatalystBitShift[A, B], encX1: Encoder[X1[A]]) = {
       val typedDS = TypedDataset.create(values)
-      propBitShift(typedDS)(shiftleft(typedDS('a), numBits), sparkFunctions.shiftleft, numBits)
+      propBitShift(typedDS)(shiftLeft(typedDS('a), numBits), sparkFunctions.shiftleft, numBits)
     }
 
     check(forAll(prop[Byte, Int] _))
@@ -1648,7 +1648,7 @@ class NonAggregateFunctionsTests extends TypedDatasetSuite {
     check(forAll(prop _))
   }
 
-  test("bitwise_not"){
+  test("bitwiseNOT"){
     val spark = session
     import spark.implicits._
 
@@ -1656,13 +1656,13 @@ class NonAggregateFunctionsTests extends TypedDatasetSuite {
       (values:List[X1[A]])(implicit encX1:Encoder[X1[A]]) = {
         val cDS = session.createDataset(values)
         val resCompare = cDS
-          .select(sparkFunctions.bitwise_not(cDS("a")))
+          .select(sparkFunctions.bitwiseNOT(cDS("a")))
           .map(_.getAs[A](0))
          .collect().toList
 
         val typedDS = TypedDataset.create(values)
         val res = typedDS
-          .select(bitwise_not(typedDS('a)))
+          .select(bitwiseNOT(typedDS('a)))
          .collect()
          .run()
          .toList
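
For illustration only, here is a minimal sketch (not part of the patch) of how the retained camelCase frameless API is called after this change. The import paths (frameless.functions.aggregate, frameless.functions.nonAggregate), the local SparkSession setup, and the Rec case class are assumptions made just for this example; since the implementations now delegate to Spark's sum_distinct, shiftrightunsigned and bitwise_not, it requires Spark 3.2 or later.

import org.apache.spark.sql.SparkSession

import frameless.TypedDataset
import frameless.functions.aggregate.sumDistinct                          // assumed import path
import frameless.functions.nonAggregate.{bitwiseNOT, shiftRightUnsigned} // assumed import path

// Hypothetical record type, used only for this sketch.
case class Rec(a: Int)

object UsageSketch extends App {
  // TypedDataset.create needs an implicit SparkSession in scope.
  implicit val spark: SparkSession =
    SparkSession.builder().master("local[*]").appName("frameless-sketch").getOrCreate()

  val ds = TypedDataset.create(Seq(Rec(1), Rec(2), Rec(2)))

  // The frameless-level names stay camelCase; under the hood they now call the
  // Spark 3.2 functions directly (sum_distinct, shiftrightunsigned, bitwise_not).
  val distinctSum = ds.agg(sumDistinct(ds('a)))
  val shifted     = ds.select(shiftRightUnsigned(ds('a), 1))
  val negated     = ds.select(bitwiseNOT(ds('a)))

  // collect() returns a Job; run() executes it, mirroring the tests above.
  println(distinctSum.collect().run())
}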