diff --git a/sql/api/src/main/scala/org/apache/spark/sql/types/UpCastRule.scala b/sql/api/src/main/scala/org/apache/spark/sql/types/UpCastRule.scala
index 4993e249b3059..8f2bdb2a595bf 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/types/UpCastRule.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/types/UpCastRule.scala
@@ -40,8 +40,8 @@ private[sql] object UpCastRule {
     case (DateType, TimestampNTZType) => true
     case (TimestampNTZType, TimestampType) => true
     case (TimestampType, TimestampNTZType) => true
-    case (_: AtomicType, StringType) => true
-    case (_: CalendarIntervalType, StringType) => true
+    case (_: AtomicType, _: StringType) => true
+    case (_: CalendarIntervalType, _: StringType) => true
     case (NullType, _) => true
 
     // Spark supports casting between long and timestamp, please see `longToTimestamp` and
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index 154199d37c46d..4ef49cc97f6e6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -281,7 +281,7 @@ object Cast extends QueryErrorsBase {
   def needsTimeZone(from: DataType, to: DataType): Boolean = (from, to) match {
     case (VariantType, _) => true
     case (_: StringType, TimestampType) => true
-    case (TimestampType, StringType) => true
+    case (TimestampType, _: StringType) => true
     case (DateType, TimestampType) => true
     case (TimestampType, DateType) => true
     case (TimestampType, TimestampNTZType) => true
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuiteBase.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuiteBase.scala
index e87b54339821f..f6173e019cdeb 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuiteBase.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuiteBase.scala
@@ -729,6 +729,8 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {
     assert(Cast.canUpCast(DateType, TimestampNTZType))
     assert(Cast.canUpCast(TimestampType, TimestampNTZType))
     assert(Cast.canUpCast(TimestampNTZType, TimestampType))
+    assert(Cast.canUpCast(IntegerType, StringType("UTF8_LCASE")))
+    assert(Cast.canUpCast(CalendarIntervalType, StringType("UTF8_LCASE")))
     assert(!Cast.canUpCast(TimestampType, DateType))
     assert(!Cast.canUpCast(TimestampNTZType, DateType))
   }
@@ -1409,4 +1411,10 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {
     assert(!Cast(timestampLiteral, TimestampNTZType).resolved)
     assert(!Cast(timestampNTZLiteral, TimestampType).resolved)
   }
+
+  test("Casting between TimestampType and StringType requires timezone") {
+    val timestampLiteral = Literal.create(1L, TimestampType)
+    assert(!Cast(timestampLiteral, StringType).resolved)
+    assert(!Cast(timestampLiteral, StringType("UTF8_LCASE")).resolved)
+  }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeWriteCompatibilitySuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeWriteCompatibilitySuite.scala
index f07ee8b35bbb2..ba3eaf46a5597 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeWriteCompatibilitySuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeWriteCompatibilitySuite.scala
@@ -685,6 +685,11 @@ abstract class DataTypeWriteCompatibilityBaseSuite extends SparkFunSuite {
     )
   }
 
+  test("Check string types: cast allowed regardless of collation") {
+    assertAllowed(StringType, StringType("UTF8_LCASE"),
+      "date time types", "Should allow writing string to collated string")
+  }
+
   // Helper functions
 
   def assertAllowed(
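
For readers following along, here is a minimal sketch (not part of the patch) of what the widened matches enable. It assumes a Spark build with collation support, where StringType("UTF8_LCASE") resolves to a collated string type, and it exercises the same catalyst APIs the new tests use (Cast.canUpCast, Cast, Literal); treat it as an illustration under those assumptions, not as part of the change.

// Illustrative sketch only, not part of the patch. Assumes a Spark build with
// collation support where StringType("UTF8_LCASE") resolves to a collated
// string type and the catalyst classes below are on the classpath.
import org.apache.spark.sql.catalyst.expressions.{Cast, Literal}
import org.apache.spark.sql.types._

object CollatedStringCastSketch {
  def main(args: Array[String]): Unit = {
    // With the match widened to _: StringType, up-casting an atomic type to a
    // collated string is accepted, just like casting to the default StringType.
    assert(Cast.canUpCast(IntegerType, StringType("UTF8_LCASE")))

    // needsTimeZone now also matches collated strings, so a Cast from
    // TimestampType to a collated string stays unresolved until a timezone
    // is supplied, mirroring the behavior for the default StringType.
    val ts = Literal.create(1L, TimestampType)
    assert(!Cast(ts, StringType("UTF8_LCASE")).resolved)
  }
}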