Refactor: no pushdown when disabled
Signed-off-by: Ziy1-Tan <[email protected]>
Ziy1-Tan committed Sep 22, 2023
1 parent 0bcbcc1 commit 4ffa007
Showing 1 changed file with 20 additions and 23 deletions.
@@ -51,47 +51,44 @@ case class GarScanBuilder(
     filters
   }
 
-  override def pushedFilters(): Array[Filter] = {
-    formatName match {
-      case "csv" => Array.empty
-      case "orc" => pushedOrcFilters
-      case "parquet" => pushedParquetFilters
-      case _ => throw new IllegalArgumentException
-    }
+  override def pushedFilters(): Array[Filter] = formatName match {
+    case "csv" => Array.empty[Filter]
+    case "orc" => pushedOrcFilters
+    case "parquet" => pushedParquetFilters
+    case _ => throw new IllegalArgumentException
   }
 
-  private lazy val pushedParquetFilters = {
+  private lazy val pushedParquetFilters: Array[Filter] = {
     if (!sparkSession.sessionState.conf.parquetFilterPushDown) {
-      Array.empty
+      Array.empty[Filter]
+    } else {
+      val builder =
+        ParquetScanBuilder(sparkSession, fileIndex, schema, dataSchema, options)
+      builder.pushFilters(this.filters)
+      builder.pushedFilters()
     }
-
-    val builder =
-      ParquetScanBuilder(sparkSession, fileIndex, schema, dataSchema, options)
-    builder.pushFilters(this.filters)
-    builder.pushedFilters()
   }
 
-  private lazy val pushedOrcFilters = {
+  private lazy val pushedOrcFilters: Array[Filter] = {
     if (!sparkSession.sessionState.conf.orcFilterPushDown) {
-      Array.empty
+      Array.empty[Filter]
+    } else {
+      val builder =
+        OrcScanBuilder(sparkSession, fileIndex, schema, dataSchema, options)
+      builder.pushFilters(this.filters)
+      builder.pushedFilters()
     }
-
-    val builder =
-      OrcScanBuilder(sparkSession, fileIndex, schema, dataSchema, options)
-    builder.pushFilters(this.filters)
-    builder.pushedFilters()
   }
 
   // Check if the file format supports nested schema pruning.
-  override protected val supportsNestedSchemaPruning: Boolean = {
+  override protected val supportsNestedSchemaPruning: Boolean =
     formatName match {
       case "csv" => false
       case "orc" => sparkSession.sessionState.conf.nestedSchemaPruningEnabled
       case "parquet" =>
         sparkSession.sessionState.conf.nestedSchemaPruningEnabled
       case _ => throw new IllegalArgumentException
     }
-  }
 
   /** Build the file scan for GarDataSource. */
   override def build(): Scan = {

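Why the added `else` matters: a Scala block evaluates to its last expression, so in the old code the `Array.empty` inside the `if` was computed and then discarded, and the builder's pushed filters were returned even when pushdown was disabled in the session config (`spark.sql.parquet.filterPushdown` / `spark.sql.orc.filterPushdown`). With the `else`, the `if`/`else` itself is the block's value, so a disabled flag now really yields no pushed filters. The explicit `Array.empty[Filter]` and the `: Array[Filter]` annotations also keep the result type from being inferred as `Array[Nothing]` now that the empty branch is actually returned. Below is a minimal self-contained sketch of the same pattern, with hypothetical names rather than the actual GarScanBuilder members:

  object PushdownSketch {
    // Before: the early Array.empty is a discarded statement, so the
    // block's value is always `filters`.
    def pushedBuggy(enabled: Boolean, filters: Array[String]): Array[String] = {
      if (!enabled) {
        Array.empty[String] // evaluated, then thrown away
      }
      filters // returned even when pushdown is disabled
    }

    // After: the if/else is the block's final expression, so disabling
    // pushdown yields an empty array.
    def pushedFixed(enabled: Boolean, filters: Array[String]): Array[String] =
      if (!enabled) Array.empty[String] else filters

    def main(args: Array[String]): Unit = {
      val fs = Array("a > 1")
      println(pushedBuggy(enabled = false, fs).mkString(",")) // "a > 1" (wrong)
      println(pushedFixed(enabled = false, fs).mkString(","))  // ""      (right)
    }
  }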