diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index e83202d9e5ee3..e8bd377ee064f 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -3267,6 +3267,12 @@
     },
     "sqlState" : "42000"
   },
+  "INVALID_ZONE_OFFSET" : {
+    "message" : [
+      "<message>. ZoneOffset represents the time difference from UTC and must be in the range from -18:00 to +18:00."
+    ],
+    "sqlState" : "22009"
+  },
   "JOIN_CONDITION_IS_NOT_BOOLEAN_TYPE" : {
     "message" : [
       "The join condition <joinCondition> has the invalid type <conditionType>, expected \"BOOLEAN\"."
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/util/SparkDateTimeUtils.scala b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/util/SparkDateTimeUtils.scala
index 4e94bc6617357..a876139fd4cff 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/util/SparkDateTimeUtils.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/util/SparkDateTimeUtils.scala
@@ -46,7 +46,12 @@ trait SparkDateTimeUtils {
     // To support the (+|-)hh:m format because it was supported before Spark 3.0.
     formattedZoneId = singleMinuteTz.matcher(formattedZoneId).replaceFirst("$1$2:0$3")
-    ZoneId.of(formattedZoneId, ZoneId.SHORT_IDS)
+    try {
+      ZoneId.of(formattedZoneId, ZoneId.SHORT_IDS)
+    } catch {
+      case e: java.time.DateTimeException =>
+        throw ExecutionErrors.zoneOffsetError(e.getMessage)
+    }
   }
 
   def getTimeZone(timeZoneId: String): TimeZone = TimeZone.getTimeZone(getZoneId(timeZoneId))
 
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/errors/ExecutionErrors.scala b/sql/api/src/main/scala/org/apache/spark/sql/errors/ExecutionErrors.scala
index 698a7b096e1a5..2ec6f5d43fc84 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/errors/ExecutionErrors.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/errors/ExecutionErrors.scala
@@ -238,6 +238,14 @@
         "encoderType" -> encoder.getClass.getName,
         "docroot" -> SparkBuildInfo.spark_doc_root))
   }
+
+  def zoneOffsetError(message: String): SparkDateTimeException = {
+    new SparkDateTimeException(
+      errorClass = "INVALID_ZONE_OFFSET",
+      messageParameters = Map("message" -> message),
+      context = Array.empty,
+      summary = "")
+  }
 }
 
 private[sql] object ExecutionErrors extends ExecutionErrors