[GLUTEN-5341] Fix and enable delta UTs for Spark3.5 (#5393)
yma11 authored Apr 15, 2024
1 parent 5e4b14c commit ba01f8b
Showing 2 changed files with 12 additions and 18 deletions.
@@ -52,7 +52,11 @@ case class DeltaScanTransformer(
   override lazy val fileFormat: ReadFileFormat = ReadFileFormat.ParquetReadFormat
 
   override protected def doValidateInternal(): ValidationResult = {
-    if (requiredSchema.fields.exists(_.name == "__delta_internal_is_row_deleted")) {
+    if (
+      requiredSchema.fields.exists(
+        _.name == "__delta_internal_is_row_deleted") || requiredSchema.fields.exists(
+        _.name == "__delta_internal_row_index")
+    ) {
       return ValidationResult.notOk(s"Deletion vector is not supported in native.")
     }
 
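For context, the extra guard matters because deletion vectors surface Delta's internal metadata columns in the scan's required schema. A minimal sketch of how that fallback would be triggered (table name and session setup are illustrative only, not part of this commit):

// Illustrative reproduction, not part of this commit. The table name dv_demo is made up,
// and a Delta release with deletion vector support (e.g. the one paired with Spark 3.5)
// plus a SparkSession `spark` are assumed to be available.
// With deletion vectors enabled, a DELETE writes a deletion vector instead of rewriting
// the Parquet file, and later scans request __delta_internal_is_row_deleted /
// __delta_internal_row_index; the check above makes DeltaScanTransformer reject such scans
// so they fall back to vanilla Spark execution instead of Velox.
spark.sql(
  "CREATE TABLE dv_demo (id INT, name STRING) USING delta " +
    "TBLPROPERTIES ('delta.enableDeletionVectors' = 'true')")
spark.sql("INSERT INTO dv_demo VALUES (1, 'a'), (2, 'b')")
spark.sql("DELETE FROM dv_demo WHERE id = 1")
spark.sql("SELECT * FROM dv_demo").show() // still correct, but the scan is not offloaded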
@@ -42,8 +42,7 @@ class VeloxDeltaSuite extends WholeStageTransformerSuite {
   }
 
   // IdMapping is supported in Delta 2.2 (related to Spark3.3.1)
-  // Disable for Spark3.5.
-  testWithSpecifiedSparkVersion("column mapping mode = id", Some("3.3"), Some("3.4")) {
+  testWithSpecifiedSparkVersion("column mapping mode = id", Some("3.3")) {
     withTable("delta_cm1") {
       spark.sql(s"""
                    |create table delta_cm1 (id int, name string) using delta
@@ -63,8 +62,7 @@ class VeloxDeltaSuite extends WholeStageTransformerSuite {
   }
 
   // NameMapping is supported in Delta 2.0 (related to Spark3.2.0)
-  // Disable for Spark3.5.
-  testWithSpecifiedSparkVersion("column mapping mode = name", Some("3.2"), Some("3.4")) {
+  testWithSpecifiedSparkVersion("column mapping mode = name", Some("3.2")) {
     withTable("delta_cm2") {
       spark.sql(s"""
                    |create table delta_cm2 (id int, name string) using delta
@@ -83,8 +81,7 @@ class VeloxDeltaSuite extends WholeStageTransformerSuite {
     }
   }
 
-  // Disable for Spark3.5.
-  testWithSpecifiedSparkVersion("delta: time travel", Some("3.3"), Some("3.4")) {
+  testWithSpecifiedSparkVersion("delta: time travel", Some("3.3")) {
     withTable("delta_tm") {
       spark.sql(s"""
                    |create table delta_tm (id int, name string) using delta
@@ -109,8 +106,7 @@ class VeloxDeltaSuite extends WholeStageTransformerSuite {
     }
   }
 
-  // Disable for Spark3.5.
-  testWithSpecifiedSparkVersion("delta: partition filters", Some("3.2"), Some("3.4")) {
+  testWithSpecifiedSparkVersion("delta: partition filters", Some("3.2")) {
     withTable("delta_pf") {
       spark.sql(s"""
                    |create table delta_pf (id int, name string) using delta partitioned by (name)
@@ -129,11 +125,7 @@ class VeloxDeltaSuite extends WholeStageTransformerSuite {
     }
   }
 
-  // Disable for Spark3.5.
-  testWithSpecifiedSparkVersion(
-    "basic test with stats.skipping disabled",
-    Some("3.2"),
-    Some("3.4")) {
+  testWithSpecifiedSparkVersion("basic test with stats.skipping disabled", Some("3.2")) {
     withTable("delta_test2") {
       withSQLConf("spark.databricks.delta.stats.skipping" -> "false") {
         spark.sql(s"""
@@ -153,8 +145,7 @@ class VeloxDeltaSuite extends WholeStageTransformerSuite {
     }
   }
 
-  // Disable for Spark3.5.
-  testWithSpecifiedSparkVersion("column mapping with complex type", Some("3.2"), Some("3.4")) {
+  testWithSpecifiedSparkVersion("column mapping with complex type", Some("3.2")) {
     withTable("t1") {
       val simpleNestedSchema = new StructType()
         .add("a", StringType, true)
@@ -204,8 +195,7 @@ class VeloxDeltaSuite extends WholeStageTransformerSuite {
     }
   }
 
-  // Disable for Spark3.5
-  testWithSpecifiedSparkVersion("deletion vector", Some("3.4"), Some("3.4")) {
+  testWithSpecifiedSparkVersion("deletion vector", Some("3.4")) {
     withTempPath {
       p =>
        import testImplicits._
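All of the VeloxDeltaSuite changes above follow the same pattern: the trailing Some("3.4") upper bound is dropped so each test now also runs on Spark 3.5. A rough sketch of how such a version-gated test helper typically works (the real helper is defined in Gluten's WholeStageTransformerSuite; the signature and names below are assumptions for illustration, not taken from this commit):

// Rough sketch only; assumes it sits inside a ScalaTest funsuite with a SparkSession
// `spark` in scope. The idea: run the test only when the running Spark major.minor
// version falls inside the optional [min, max] range, so removing the max bound
// (previously Some("3.4")) lets the same test execute on Spark 3.5 as well.
def testWithSpecifiedSparkVersion(
    testName: String,
    minSparkVersion: Option[String] = None,
    maxSparkVersion: Option[String] = None)(testFun: => Any): Unit = {
  val runningVersion = spark.version.split("\\.").take(2).mkString(".") // e.g. "3.5"
  // simple string comparison is good enough for the 3.2 - 3.5 range used here
  val inRange =
    minSparkVersion.forall(_ <= runningVersion) && maxSparkVersion.forall(runningVersion <= _)
  if (inRange) test(testName)(testFun) else ignore(testName)(testFun)
}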
