Commit bd8de58

Merge branch 'main' into json-suite-fix
ayushi-agarwal authored Apr 20, 2024
2 parents: ca35d47 + b08258e

Showing 3 changed files with 22 additions and 20 deletions.
VeloxAggregateFunctionsSuite.scala

@@ -371,8 +371,7 @@ abstract class VeloxAggregateFunctionsSuite extends VeloxWholeStageTransformerSu
     }
   }
 
-  // Disable for Spark3.5.
-  testWithSpecifiedSparkVersion("regr_r2", Some("3.3"), Some("3.4")) {
+  testWithSpecifiedSparkVersion("regr_r2", Some("3.3")) {
     runQueryAndCompare("""
       |select regr_r2(l_partkey, l_suppkey) from lineitem;
       |""".stripMargin) {
@@ -391,8 +390,7 @@ abstract class VeloxAggregateFunctionsSuite extends VeloxWholeStageTransformerSu
     }
   }
 
-  // Disable for Spark3.5.
-  testWithSpecifiedSparkVersion("regr_slope", Some("3.4"), Some("3.4")) {
+  testWithSpecifiedSparkVersion("regr_slope", Some("3.4")) {
     runQueryAndCompare("""
       |select regr_slope(l_partkey, l_suppkey) from lineitem;
       |""".stripMargin) {
@@ -411,8 +409,7 @@ abstract class VeloxAggregateFunctionsSuite extends VeloxWholeStageTransformerSu
     }
   }
 
-  // Disable for Sparke3.5.
-  testWithSpecifiedSparkVersion("regr_intercept", Some("3.4"), Some("3.4")) {
+  testWithSpecifiedSparkVersion("regr_intercept", Some("3.4")) {
     runQueryAndCompare("""
       |select regr_intercept(l_partkey, l_suppkey) from lineitem;
       |""".stripMargin) {
@@ -431,8 +428,7 @@ abstract class VeloxAggregateFunctionsSuite extends VeloxWholeStageTransformerSu
     }
   }
 
-  // Disable for Sparke3.5.
-  testWithSpecifiedSparkVersion("regr_sxy regr_sxx regr_syy", Some("3.4"), Some("3.4")) {
+  testWithSpecifiedSparkVersion("regr_sxy regr_sxx regr_syy", Some("3.4")) {
    runQueryAndCompare("""
       |select regr_sxy(l_quantity, l_tax) from lineitem;
       |""".stripMargin) {
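Note: in each of the four hunks above, the second Option argument being dropped is an inclusive upper bound on the Spark version, so these tests now also run on Spark 3.5; the Spark35Shims change below supplies the expression mappings that make that possible. As a hedged illustration only, a version-gated helper of this shape could look like the following sketch; the names and logic are inferred from the call sites, not taken from the suite's real implementation, which is not shown in this diff.

import org.apache.spark.SPARK_VERSION
import org.scalatest.funsuite.AnyFunSuite

// Hypothetical mix-in; the suite's actual helper is assumed, not confirmed.
trait SparkVersionGatedTests { this: AnyFunSuite =>

  // Extract (major, minor) from a version string such as "3.5.1" or "3.3".
  private def key(v: String): (Int, Int) = {
    val parts = v.split('.')
    (parts(0).toInt, parts(1).toInt)
  }

  // Register the test only when the running Spark version lies inside
  // [minSparkVersion, maxSparkVersion]; an absent bound means unbounded.
  def testWithSpecifiedSparkVersion(
      name: String,
      minSparkVersion: Option[String] = None,
      maxSparkVersion: Option[String] = None)(body: => Unit): Unit = {
    val ord = Ordering.Tuple2[Int, Int]
    val cur = key(SPARK_VERSION)
    val enabled = minSparkVersion.forall(v => ord.gteq(cur, key(v))) &&
      maxSparkVersion.forall(v => ord.lteq(cur, key(v)))
    if (enabled) test(name)(body) else ignore(name)(body)
  }
}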
VeloxTPCHIcebergSuite.scala

@@ -17,14 +17,12 @@
 package org.apache.gluten.execution
 
 import org.apache.spark.SparkConf
+import org.apache.spark.sql.functions.col
 
 import org.apache.iceberg.spark.SparkWriteOptions
-import org.scalatest.Ignore
 
 import java.io.File
 
-// Ignored due to failures, see https://github.com/apache/incubator-gluten/issues/5362
-@Ignore
 class VeloxTPCHIcebergSuite extends VeloxTPCHSuite {
 
   protected val tpchBasePath: String = new File(
@@ -97,21 +95,21 @@ class VeloxTPCHIcebergSuite extends VeloxTPCHSuite {
   }
 }
 
-// Ignored due to failures, see https://github.com/apache/incubator-gluten/issues/5362
-@Ignore
 class VeloxPartitionedTableTPCHIcebergSuite extends VeloxTPCHIcebergSuite {
   override protected def createTPCHNotNullTables(): Unit = {
     TPCHTables.map {
       table =>
         val tablePath = new File(resourcePath, table.name).getAbsolutePath
-        val tableDF = spark.read.format(fileFormat).load(tablePath)
+        val tableDF = spark.read
+          .format(fileFormat)
+          .load(tablePath)
+          .repartition(table.partitionColumns.map(col): _*)
+          .sortWithinPartitions(table.partitionColumns.map(col): _*)
 
-        tableDF
-          .repartition(800)
-          .write
+        tableDF.write
           .format("iceberg")
           .partitionBy(table.partitionColumns: _*)
-          .option(SparkWriteOptions.FANOUT_ENABLED, "true")
+          .option(SparkWriteOptions.FANOUT_ENABLED, "false")
           .mode("overwrite")
           .saveAsTable(table.name)
         (table.name, tableDF)
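The rewritten suite clusters and sorts rows by the partition columns before writing, which is what makes it safe to turn Iceberg's fanout writer off: once each task sees its rows grouped by partition key, it only needs one open partition file at a time. Below is a minimal standalone sketch of the same pattern; the session setup, input path, table name, and column names are hypothetical, not from the suite.

import org.apache.iceberg.spark.SparkWriteOptions
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

// Assumes a SparkSession with an Iceberg catalog already configured.
val spark = SparkSession.builder().appName("sorted-iceberg-write").getOrCreate()

val partitionCols = Seq("l_shipdate") // hypothetical partition column

val df = spark.read
  .parquet("/tmp/tpch/lineitem") // hypothetical input path
  .repartition(partitionCols.map(col): _*) // cluster rows sharing a partition key
  .sortWithinPartitions(partitionCols.map(col): _*) // group partitions within each task

df.write
  .format("iceberg")
  .partitionBy(partitionCols: _*)
  // Fanout keeps many files open for unsorted input; sorted input lets us disable it.
  .option(SparkWriteOptions.FANOUT_ENABLED, "false")
  .mode("overwrite")
  .saveAsTable("lineitem_iceberg") // hypothetical table name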
Spark35Shims.scala

@@ -29,7 +29,7 @@ import org.apache.spark.sql.{AnalysisException, SparkSession}
 import org.apache.spark.sql.catalyst.{ExtendedAnalysisException, InternalRow}
 import org.apache.spark.sql.catalyst.catalog.BucketSpec
 import org.apache.spark.sql.catalyst.expressions._
-import org.apache.spark.sql.catalyst.expressions.aggregate.{BloomFilterAggregate, TypedImperativeAggregate}
+import org.apache.spark.sql.catalyst.expressions.aggregate.{BloomFilterAggregate, RegrIntercept, RegrR2, RegrReplacement, RegrSlope, RegrSXY, TypedImperativeAggregate}
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.plans.physical.{ClusteredDistribution, Distribution}
 import org.apache.spark.sql.catalyst.rules.Rule
@@ -71,7 +71,15 @@ class Spark35Shims extends SparkShims {
       Sig[Empty2Null](ExpressionNames.EMPTY2NULL))
   }
 
-  override def aggregateExpressionMappings: Seq[Sig] = Seq.empty
+  override def aggregateExpressionMappings: Seq[Sig] = {
+    Seq(
+      Sig[RegrR2](ExpressionNames.REGR_R2),
+      Sig[RegrSlope](ExpressionNames.REGR_SLOPE),
+      Sig[RegrIntercept](ExpressionNames.REGR_INTERCEPT),
+      Sig[RegrSXY](ExpressionNames.REGR_SXY),
+      Sig[RegrReplacement](ExpressionNames.REGR_REPLACEMENT)
+    )
+  }
 
   override def convertPartitionTransforms(
       partitions: Seq[Transform]): (Seq[String], Option[BucketSpec]) = {
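These mappings tie the Spark 3.5 Catalyst aggregate classes to the names the Velox backend recognizes, replacing the earlier Seq.empty and unblocking the version-bound changes in the suite above. A hedged sketch of how such a signature table is typically consumed at planning time follows; Sig below is a stand-in for Gluten's real type, whose definition is not in this diff, and the literal name strings stand in for the ExpressionNames constants.

import scala.reflect.ClassTag

import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.expressions.aggregate.{RegrIntercept, RegrR2, RegrSlope}

// Stand-in for Gluten's Sig: pairs a Catalyst expression class with the
// name the native backend knows it by.
final case class Sig(expClass: Class[_], name: String)
object Sig {
  def apply[T <: Expression: ClassTag](name: String): Sig =
    Sig(implicitly[ClassTag[T]].runtimeClass, name)
}

// Lookup table from Catalyst class to backend function name.
val mappings: Map[Class[_], String] = Seq(
  Sig[RegrR2]("regr_r2"),
  Sig[RegrSlope]("regr_slope"),
  Sig[RegrIntercept]("regr_intercept")
).map(s => s.expClass -> s.name).toMap

// At planning time, an aggregate is offloaded only if a mapping exists.
def backendNameOf(agg: Expression): Option[String] = mappings.get(agg.getClass)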
