From 4acf89e9b7d9b1d36435f4eedf907dc5cb20d4ac Mon Sep 17 00:00:00 2001
From: dengziming
Date: Thu, 26 Oct 2023 10:33:12 +0800
Subject: [PATCH] Resolve comments

---
 .../main/resources/error/error-classes.json   | 28 +++++-------
 ...ons-invalid-parameter-value-error-class.md |  8 ++++
 docs/sql-error-conditions.md                  | 18 --------
 .../expressions/collectionOperations.scala    |  4 +-
 .../sql/errors/QueryExecutionErrors.scala     | 26 ++++++-----
 .../CollectionExpressionsSuite.scala          | 11 +++--
 .../spark/sql/CollectionFunctionsSuite.scala  | 45 -------------------
 .../errors/QueryExecutionErrorsSuite.scala    | 32 ++++++++++++-
 8 files changed, 73 insertions(+), 99 deletions(-)
 delete mode 100644 sql/core/src/test/scala/org/apache/spark/sql/CollectionFunctionsSuite.scala

diff --git a/common/utils/src/main/resources/error/error-classes.json b/common/utils/src/main/resources/error/error-classes.json
index c327b2f23fd22..121635a164236 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -1968,6 +1968,11 @@
           "expects one of the units without quotes YEAR, QUARTER, MONTH, WEEK, DAY, DAYOFYEAR, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, but got the string literal ."
         ]
       },
+      "LENGTH" : {
+        "message" : [
+          "Expects `length` greater than or equal to 0, but got <length>."
+        ]
+      },
       "NULL" : {
         "message" : [
           "expects a non-NULL value."
         ]
       },
@@ -1983,6 +1988,11 @@
           "Expects group index between 0 and , but got ."
         ]
       },
+      "START" : {
+        "message" : [
+          "Expects `start` to start at 1 or start from the end if start is negative, but got <start>."
+        ]
+      },
       "ZERO_INDEX" : {
         "message" : [
           "expects %1$, %2$ and so on, but got %0$."
         ]
       },
@@ -3029,18 +3039,6 @@
     ],
     "sqlState" : "42846"
   },
-  "UNEXPECTED_VALUE_FOR_LENGTH_IN_SLICE_FUNCTION" : {
-    "message" : [
-      "Unexpected value for length in function <prettyName>: length must be greater than or equal to 0."
-    ],
-    "sqlState" : "22003"
-  },
-  "UNEXPECTED_VALUE_FOR_START_IN_SLICE_FUNCTION" : {
-    "message" : [
-      "Unexpected value for start in function <prettyName>: SQL array indices start at 1."
-    ],
-    "sqlState" : "22003"
-  },
   "UNKNOWN_PROTOBUF_MESSAGE_TYPE" : {
     "message" : [
       "Attempting to treat as a Message, but it was ."
     ]
@@ -3225,12 +3223,6 @@
     ],
     "sqlState" : "0A000"
   },
-  "UNSUPPORTED_DATA_TYPE_FOR_SIZE_FUNCTION" : {
-    "message" : [
-      "The size function doesn't support the operand type <dataType>."
-    ],
-    "sqlState" : "42K09"
-  },
   "UNSUPPORTED_DEFAULT_VALUE" : {
     "message" : [
       "DEFAULT column values is not supported."
     ]
diff --git a/docs/sql-error-conditions-invalid-parameter-value-error-class.md b/docs/sql-error-conditions-invalid-parameter-value-error-class.md
index 8186a56314dea..0db1c360524be 100644
--- a/docs/sql-error-conditions-invalid-parameter-value-error-class.md
+++ b/docs/sql-error-conditions-invalid-parameter-value-error-class.md
@@ -49,6 +49,10 @@ expects an integer value in [0, ``), but got ``.
 
 expects one of the units without quotes YEAR, QUARTER, MONTH, WEEK, DAY, DAYOFYEAR, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, but got the string literal ``.
 
+## LENGTH
+
+Expects `length` greater than or equal to 0, but got `<length>`.
+
 ## NULL
 
 expects a non-NULL value.
@@ -61,6 +65,10 @@ expects a non-NULL value.
 
 Expects group index between 0 and ``, but got ``.
 
+## START
+
+Expects `start` to start at 1 or start from the end if start is negative, but got `<start>`.
+
 ## ZERO_INDEX
 
 expects `%1$`, `%2$` and so on, but got `%0$`.
diff --git a/docs/sql-error-conditions.md b/docs/sql-error-conditions.md
index 2537eca2e5484..f22e527374604 100644
--- a/docs/sql-error-conditions.md
+++ b/docs/sql-error-conditions.md
@@ -1953,18 +1953,6 @@ Cannot invoke function `` because it contains positional argument(
 
 The class `` has an unexpected expression serializer. Expects "STRUCT" or "IF" which returns "STRUCT" but found ``.
 
-### UNEXPECTED_VALUE_FOR_LENGTH_IN_SLICE_FUNCTION
-
-[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception)
-
-Unexpected value for length in function `<prettyName>`: length must be greater than or equal to 0.
-
-### UNEXPECTED_VALUE_FOR_START_IN_SLICE_FUNCTION
-
-[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception)
-
-Unexpected value for start in function `<prettyName>`: SQL array indices start at 1.
-
 ### UNKNOWN_PROTOBUF_MESSAGE_TYPE
 
 [SQLSTATE: 42K0G](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
@@ -2109,12 +2097,6 @@ Unsupported data type ``.
 
 The `` datasource doesn't support the column `` of the type ``.
 
-### UNSUPPORTED_DATA_TYPE_FOR_SIZE_FUNCTION
-
-[SQLSTATE: 42K09](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
-
-The size function doesn't support the operand type `<dataType>`.
-
 ### [UNSUPPORTED_DEFAULT_VALUE](sql-error-conditions-unsupported-default-value-error-class.html)
 
 [SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
index 759000bc5f52a..e83817e66c9a9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
@@ -1761,14 +1761,14 @@ case class Slice(x: Expression, start: Expression, length: Expression)
     val lengthInt = lengthVal.asInstanceOf[Int]
     val arr = xVal.asInstanceOf[ArrayData]
     val startIndex = if (startInt == 0) {
-      throw QueryExecutionErrors.unexpectedValueForStartInFunctionError(prettyName)
+      throw QueryExecutionErrors.unexpectedValueForStartInFunctionError(prettyName, startInt)
     } else if (startInt < 0) {
       startInt + arr.numElements()
     } else {
       startInt - 1
     }
     if (lengthInt < 0) {
-      throw QueryExecutionErrors.unexpectedValueForLengthInFunctionError(prettyName)
+      throw QueryExecutionErrors.unexpectedValueForLengthInFunctionError(prettyName, lengthInt)
     }
     // startIndex can be negative if start is negative and its absolute value is greater than the
     // number of elements in the array
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index c71077e290d44..50a8aea3babc3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -1368,25 +1368,29 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
   }
 
   def unsupportedOperandTypeForSizeFunctionError(
-      dataType: DataType): SparkUnsupportedOperationException = {
-    new SparkUnsupportedOperationException(
-      errorClass = "UNSUPPORTED_DATA_TYPE_FOR_SIZE_FUNCTION",
-      messageParameters = Map(
-        "dataType" -> dataType.getClass.getCanonicalName))
+      dataType: DataType): Throwable = {
+    SparkException.internalError(
+      s"The size function doesn't support the operand type ${toSQLType(dataType)}")
   }
 
-  def unexpectedValueForStartInFunctionError(prettyName: String): SparkRuntimeException = {
+  def unexpectedValueForStartInFunctionError(
+      prettyName: String, start: Int): SparkRuntimeException = {
     new SparkRuntimeException(
-      errorClass = "UNEXPECTED_VALUE_FOR_START_IN_SLICE_FUNCTION",
+      errorClass = "INVALID_PARAMETER_VALUE.START",
       messageParameters = Map(
-        "prettyName" -> prettyName))
+        "parameter" -> "start",
+        "start" -> start.toString,
+        "functionName" -> toSQLId(prettyName)))
   }
 
-  def unexpectedValueForLengthInFunctionError(prettyName: String): SparkRuntimeException = {
+  def unexpectedValueForLengthInFunctionError(
+      prettyName: String, length: Int): SparkRuntimeException = {
     new SparkRuntimeException(
-      errorClass = "UNEXPECTED_VALUE_FOR_LENGTH_IN_SLICE_FUNCTION",
+      errorClass = "INVALID_PARAMETER_VALUE.LENGTH",
       messageParameters = Map(
-        "prettyName" -> prettyName))
+        "parameter" -> "length",
+        "length" -> length.toString,
+        "functionName" -> toSQLId(prettyName)))
   }
 
   def invalidIndexOfZeroError(context: SQLQueryContext): RuntimeException = {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala
index 80febfd8cda92..9f29764b8dbbf 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala
@@ -32,12 +32,14 @@ import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
 import org.apache.spark.sql.catalyst.util.{DateTimeTestUtils, DateTimeUtils}
 import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.{outstandingZoneIds, LA, UTC}
 import org.apache.spark.sql.catalyst.util.IntervalUtils._
+import org.apache.spark.sql.errors.DataTypeErrorsBase
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.array.ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH
 import org.apache.spark.unsafe.types.UTF8String
 
-class CollectionExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
+class CollectionExpressionsSuite
+  extends SparkFunSuite with ExpressionEvalHelper with DataTypeErrorsBase {
 
   implicit def stringToUTF8Str(str: String): UTF8String = UTF8String.fromString(str)
 
@@ -87,14 +89,15 @@ class CollectionExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper
   }
 
   test("Unsupported data type for size()") {
-    val exception = intercept[org.apache.spark.SparkUnsupportedOperationException] {
+    val exception = intercept[org.apache.spark.SparkException] {
       Size(Literal.create("str", StringType)).eval(EmptyRow)
     }
     checkError(
       exception = exception,
-      errorClass = "UNSUPPORTED_DATA_TYPE_FOR_SIZE_FUNCTION",
+      errorClass = "INTERNAL_ERROR",
       parameters = Map(
-        "dataType" -> StringType.getClass.getCanonicalName
+        "message" -> ("The size function doesn't support the operand type " +
+          toSQLType(StringType))
       ))
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CollectionFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CollectionFunctionsSuite.scala
deleted file mode 100644
index e967a9b8e4663..0000000000000
--- a/sql/core/src/test/scala/org/apache/spark/sql/CollectionFunctionsSuite.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql
-
-import org.apache.spark.{SparkFunSuite, SparkRuntimeException}
-import org.apache.spark.sql.test.SharedSparkSession
-
-class CollectionFunctionsSuite extends SparkFunSuite with SharedSparkSession {
-
-  test("Unexpected `start` for slice()") {
-    checkError(
-      exception = intercept[SparkRuntimeException] {
-        sql("select slice(array(1,2,3), 0, 1)").collect()
-      },
-      errorClass = "UNEXPECTED_VALUE_FOR_START_IN_SLICE_FUNCTION",
-      parameters = Map("prettyName" -> "slice")
-    )
-  }
-
-  test("Unexpected `length` for slice()") {
-    checkError(
-      exception = intercept[SparkRuntimeException] {
-        sql("select slice(array(1,2,3), 1, -1)").collect()
-      },
-      errorClass = "UNEXPECTED_VALUE_FOR_LENGTH_IN_SLICE_FUNCTION",
-      parameters = Map("prettyName" -> "slice")
-    )
-  }
-
-}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index d133270e2956c..d6847213d6bde 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -36,6 +36,7 @@ import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext
 import org.apache.spark.sql.catalyst.expressions.objects.InitializeJavaBean
 import org.apache.spark.sql.catalyst.rules.RuleIdCollection
 import org.apache.spark.sql.catalyst.util.BadRecordException
+import org.apache.spark.sql.errors.DataTypeErrorsBase
 import org.apache.spark.sql.execution.datasources.jdbc.{DriverRegistry, JDBCOptions}
 import org.apache.spark.sql.execution.datasources.jdbc.connection.ConnectionProvider
 import org.apache.spark.sql.execution.datasources.orc.OrcTest
@@ -55,7 +56,8 @@ class QueryExecutionErrorsSuite
   extends QueryTest
   with ParquetTest
   with OrcTest
-  with SharedSparkSession {
+  with SharedSparkSession
+  with DataTypeErrorsBase {
 
   import testImplicits._
 
@@ -1016,6 +1018,34 @@ class QueryExecutionErrorsSuite
       )
     }
   }
+
+  test("Unexpected `start` for slice()") {
+    checkError(
+      exception = intercept[SparkRuntimeException] {
+        sql("select slice(array(1,2,3), 0, 1)").collect()
+      },
+      errorClass = "INVALID_PARAMETER_VALUE.START",
+      parameters = Map(
+        "parameter" -> "start",
+        "start" -> 0.toString,
+        "functionName" -> toSQLId("slice")
+      )
+    )
+  }
+
+  test("Unexpected `length` for slice()") {
+    checkError(
+      exception = intercept[SparkRuntimeException] {
+        sql("select slice(array(1,2,3), 1, -1)").collect()
+      },
+      errorClass = "INVALID_PARAMETER_VALUE.LENGTH",
+      parameters = Map(
+        "parameter" -> "length",
+        "length" -> (-1).toString,
+        "functionName" -> toSQLId("slice")
+      )
+    )
+  }
 }
 
 class FakeFileSystemSetPermission extends LocalFileSystem {
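
A minimal sketch (not part of the patch) of how the remapped slice() errors surface after this change, assuming a running SparkSession bound to `spark`; the queries are the same ones exercised in QueryExecutionErrorsSuite:

import org.apache.spark.SparkRuntimeException

// start = 0 is rejected: SQL array indices start at 1, or count from the end when negative.
try {
  spark.sql("select slice(array(1,2,3), 0, 1)").collect()
} catch {
  case e: SparkRuntimeException =>
    // Previously UNEXPECTED_VALUE_FOR_START_IN_SLICE_FUNCTION.
    println(e.getErrorClass) // INVALID_PARAMETER_VALUE.START
}

// length = -1 is rejected: length must be greater than or equal to 0.
try {
  spark.sql("select slice(array(1,2,3), 1, -1)").collect()
} catch {
  case e: SparkRuntimeException =>
    // Previously UNEXPECTED_VALUE_FOR_LENGTH_IN_SLICE_FUNCTION.
    println(e.getErrorClass) // INVALID_PARAMETER_VALUE.LENGTH
}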