diff --git a/common/utils/src/main/resources/error/error-classes.json b/common/utils/src/main/resources/error/error-classes.json index 99fbc585f9813..93846e66df521 100644 --- a/common/utils/src/main/resources/error/error-classes.json +++ b/common/utils/src/main/resources/error/error-classes.json @@ -40,7 +40,7 @@ "AMBIGUOUS_COLUMN_REFERENCE" : { "message" : [ - "Column is ambiguous. It's because you joined several DataFrame together, and some of these DataFrames are the same.", - "This column points to one of the DataFrame but Spark is unable to figure out which one.", + "Column is ambiguous. It's because you joined several DataFrames together, and some of these DataFrames are the same.", + "This column points to one of the DataFrames but Spark is unable to figure out which one.", "Please alias the DataFrames with different names via `DataFrame.alias` before joining them,", "and specify the column using qualified name, e.g. `df.alias(\"a\").join(df.alias(\"b\"), col(\"a.id\") > col(\"b.id\"))`." ], @@ -6184,17 +6184,17 @@ }, "_LEGACY_ERROR_TEMP_2109" : { "message" : [ - "Cannot build HashedRelation with more than 1/3 billions unique keys." + "Cannot build HashedRelation with more than 1/3 billion unique keys." ] }, "_LEGACY_ERROR_TEMP_2110" : { "message" : [ - "Can not build a HashedRelation that is larger than 8G." + "Cannot build a HashedRelation that is larger than 8G." ] }, "_LEGACY_ERROR_TEMP_2111" : { "message" : [ - "failed to push a row into ." + "Failed to push a row into ." 
] }, "_LEGACY_ERROR_TEMP_2112" : { diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala index b4920c7cb841d..5dd536eeb3046 100644 --- a/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala +++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala @@ -323,11 +323,11 @@ class BlockManagerMasterEndpoint( val isAlive = try { driverEndpoint.askSync[Boolean](CoarseGrainedClusterMessages.IsExecutorAlive(executorId)) } catch { - // ignore the non-fatal error from driverEndpoint since the caller doesn't really - // care about the return result of removing blocks. And so we could avoid breaking + // Ignore the non-fatal error from driverEndpoint since the caller doesn't really + // care about the return result of removing blocks. That way we avoid breaking // down the whole application. case NonFatal(e) => - logError(s"Fail to know the executor $executorId is alive or not.", e) + logError(s"Cannot determine whether executor $executorId is alive or not.", e) false } if (!isAlive) { diff --git a/docs/sql-error-conditions.md b/docs/sql-error-conditions.md index b6b159f277c02..165f0a1d94af1 100644 --- a/docs/sql-error-conditions.md +++ b/docs/sql-error-conditions.md @@ -71,7 +71,7 @@ Column or field `` is ambiguous and has `` matches. [SQLSTATE: 42702](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) -Column `` is ambiguous. It's because you joined several DataFrame together, and some of these DataFrames are the same. -This column points to one of the DataFrame but Spark is unable to figure out which one. +Column `` is ambiguous. It's because you joined several DataFrames together, and some of these DataFrames are the same. +This column points to one of the DataFrames but Spark is unable to figure out which one. Please alias the DataFrames with different names via `DataFrame.alias` before joining them, and specify the column using qualified name, e.g. 
`df.alias("a").join(df.alias("b"), col("a.id") > col("b.id"))`. diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/UISeleniumSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/UISeleniumSuite.scala index dc617046c4303..111e233c04e32 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/UISeleniumSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/UISeleniumSuite.scala @@ -102,7 +102,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser { test("SPARK-44801: Analyzer failure shall show the query in failed table") { spark = creatSparkSessionWithUI - intercept[Exception](spark.sql("SELECT * FROM I_AM_A_INVISIBLE_TABLE").isEmpty) + intercept[Exception](spark.sql("SELECT * FROM I_AM_AN_INVISIBLE_TABLE").isEmpty) eventually(timeout(10.seconds), interval(100.milliseconds)) { val sd = findErrorMessageOnSQLUI() assert(sd.size === 1, "Analyze fail shall show the query in failed table")