Skip to content

Commit

Permalink
[SPARK-40324][SQL][FOLLOWUP] Fix a bug in setting query context in An…
Browse files Browse the repository at this point in the history
…alyzer

### What changes were proposed in this pull request?

This is a followup of #37841. The code change in https://github.com/apache/spark/pull/37841/files#diff-ed19f376a63eba52eea59ca71f3355d4495fad4fad4db9a3324aade0d4986a47R212 is wrong. The context of `analyzed.origin` is not accurate at all. Thus this PR is to revert it.
### Why are the changes needed?

Fix a wrong query context introduced in the previous PR #37841.
### Does this PR introduce _any_ user-facing change?

No

### How was this patch tested?

GA tests

Closes #37861 from gengliangwang/fxiReg.

Authored-by: Gengliang Wang <[email protected]>
Signed-off-by: Gengliang Wang <[email protected]>
  • Loading branch information
gengliangwang committed Sep 14, 2022
1 parent 27f1c70 commit d45b894
Show file tree
Hide file tree
Showing 44 changed files with 1,835 additions and 1,835 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ class AnalysisException protected[sql] (
cause: Option[Throwable] = this.cause,
errorClass: Option[String] = this.errorClass,
messageParameters: Map[String, String] = this.messageParameters,
context: Array[QueryContext] = Array.empty): AnalysisException =
context: Array[QueryContext] = this.context): AnalysisException =
new AnalysisException(message, line, startPosition, plan, cause, errorClass, errorSubClass,
messageParameters, context)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -209,8 +209,7 @@ class Analyzer(override val catalogManager: CatalogManager)
analyzed
} catch {
case e: AnalysisException =>
val ae = e.copy(plan = Option(analyzed),
context = analyzed.origin.getQueryContext)
val ae = e.copy(plan = Option(analyzed))
ae.setStackTrace(e.getStackTrace)
throw ae
}
Expand Down
118 changes: 59 additions & 59 deletions sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
Original file line number Diff line number Diff line change
Expand Up @@ -296,9 +296,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 30,
"fragment" : "select '2' / interval 2 second"
"fragment" : "'2' / interval 2 second"
} ]
}

Expand All @@ -320,9 +320,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 28,
"fragment" : "select '2' / interval 2 year"
"fragment" : "'2' / interval 2 year"
} ]
}

Expand Down Expand Up @@ -430,9 +430,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 28,
"fragment" : "select 2 / interval '2' year"
"fragment" : "2 / interval '2' year"
} ]
}

Expand All @@ -454,9 +454,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 28,
"fragment" : "select 2 / interval '2' hour"
"fragment" : "2 / interval '2' hour"
} ]
}

Expand All @@ -478,9 +478,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 31,
"fragment" : "select null / interval '2' year"
"fragment" : "null / interval '2' year"
} ]
}

Expand All @@ -502,9 +502,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 31,
"fragment" : "select null / interval '2' hour"
"fragment" : "null / interval '2' hour"
} ]
}

Expand Down Expand Up @@ -1680,9 +1680,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"stopIndex" : 388,
"fragment" : "select\n interval '2' year + '3-3 year to month',\n interval '2' year - '3 month',\n '3-2 year to month' + interval '2-2' year to month,\n '3 year' - interval '2-2' year to month,\n interval '99 11:22:33.123456789' day to second + '12:12 hour to second',\n interval '99 11:22:33.123456789' day to second - '12 hour',\n '4 day' + interval '10' day,\n '4 22 day to hour' - interval '10' day"
"startIndex" : 10,
"stopIndex" : 48,
"fragment" : "interval '2' year + '3-3 year to month'"
} ]
}

Expand Down Expand Up @@ -1720,9 +1720,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 32,
"fragment" : "select interval '2' year + '3-3'"
"fragment" : "interval '2' year + '3-3'"
} ]
}

Expand All @@ -1744,9 +1744,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 30,
"fragment" : "select interval '2' year - '4'"
"fragment" : "interval '2' year - '4'"
} ]
}

Expand Down Expand Up @@ -1826,9 +1826,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"stopIndex" : 49,
"fragment" : "select interval '2' year + str from interval_view"
"startIndex" : 8,
"stopIndex" : 30,
"fragment" : "interval '2' year + str"
} ]
}

Expand All @@ -1850,9 +1850,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"stopIndex" : 49,
"fragment" : "select interval '2' year - str from interval_view"
"startIndex" : 8,
"stopIndex" : 30,
"fragment" : "interval '2' year - str"
} ]
}

Expand Down Expand Up @@ -1951,9 +1951,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 54,
"fragment" : "select interval '3' day - interval '2-2' year to month"
"fragment" : "interval '3' day - interval '2-2' year to month"
} ]
}

Expand Down Expand Up @@ -1984,9 +1984,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 29,
"fragment" : "select 1 + interval '2' month"
"fragment" : "1 + interval '2' month"
} ]
}

Expand Down Expand Up @@ -2017,9 +2017,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 29,
"fragment" : "select interval '2' month - 1"
"fragment" : "interval '2' month - 1"
} ]
}

Expand Down Expand Up @@ -2888,9 +2888,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 42,
"fragment" : "SELECT INTERVAL 1 MONTH > INTERVAL 20 DAYS"
"fragment" : "INTERVAL 1 MONTH > INTERVAL 20 DAYS"
} ]
}

Expand All @@ -2912,9 +2912,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 29,
"fragment" : "SELECT INTERVAL '1' DAY < '1'"
"fragment" : "INTERVAL '1' DAY < '1'"
} ]
}

Expand All @@ -2936,9 +2936,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 29,
"fragment" : "SELECT INTERVAL '1' DAY = '1'"
"fragment" : "INTERVAL '1' DAY = '1'"
} ]
}

Expand All @@ -2960,9 +2960,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 29,
"fragment" : "SELECT INTERVAL '1' DAY > '1'"
"fragment" : "INTERVAL '1' DAY > '1'"
} ]
}

Expand All @@ -2984,9 +2984,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 29,
"fragment" : "SELECT '1' < INTERVAL '1' DAY"
"fragment" : "'1' < INTERVAL '1' DAY"
} ]
}

Expand All @@ -3008,9 +3008,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 29,
"fragment" : "SELECT '1' = INTERVAL '1' DAY"
"fragment" : "'1' = INTERVAL '1' DAY"
} ]
}

Expand All @@ -3032,9 +3032,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 29,
"fragment" : "SELECT '1' > INTERVAL '1' DAY"
"fragment" : "'1' > INTERVAL '1' DAY"
} ]
}

Expand All @@ -3056,9 +3056,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 30,
"fragment" : "SELECT INTERVAL '1' YEAR < '1'"
"fragment" : "INTERVAL '1' YEAR < '1'"
} ]
}

Expand All @@ -3080,9 +3080,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 30,
"fragment" : "SELECT INTERVAL '1' YEAR = '1'"
"fragment" : "INTERVAL '1' YEAR = '1'"
} ]
}

Expand All @@ -3104,9 +3104,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 30,
"fragment" : "SELECT INTERVAL '1' YEAR > '1'"
"fragment" : "INTERVAL '1' YEAR > '1'"
} ]
}

Expand All @@ -3128,9 +3128,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 30,
"fragment" : "SELECT '1' < INTERVAL '1' YEAR"
"fragment" : "'1' < INTERVAL '1' YEAR"
} ]
}

Expand All @@ -3152,9 +3152,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 30,
"fragment" : "SELECT '1' = INTERVAL '1' YEAR"
"fragment" : "'1' = INTERVAL '1' YEAR"
} ]
}

Expand All @@ -3176,9 +3176,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 30,
"fragment" : "SELECT '1' > INTERVAL '1' YEAR"
"fragment" : "'1' > INTERVAL '1' YEAR"
} ]
}

Expand Down Expand Up @@ -3298,9 +3298,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 1,
"startIndex" : 8,
"stopIndex" : 49,
"fragment" : "SELECT div(INTERVAL '1' MONTH, INTERVAL '-1' DAY)"
"fragment" : "div(INTERVAL '1' MONTH, INTERVAL '-1' DAY)"
} ]
}

Expand Down
Loading

0 comments on commit d45b894

Please sign in to comment.