diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 609c6c6ec2..66403f1826 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -386,8 +386,8 @@ class Analyzer(override val catalogManager: CatalogManager)
             DatetimeSub(l, r, DateAddInterval(l, UnaryMinus(r, f), ansiEnabled = f))
           case (_, CalendarIntervalType | _: DayTimeIntervalType) =>
             Cast(DatetimeSub(l, r, TimeAdd(l, UnaryMinus(r, f))), l.dataType)
-          case (TimestampType, _) => SubtractTimestamps(l, r)
-          case (_, TimestampType) => SubtractTimestamps(l, r)
+          case _ if AnyTimestampType.unapply(l) || AnyTimestampType.unapply(r) =>
+            SubtractTimestamps(l, r)
           case (_, DateType) => SubtractDates(l, r)
           case (DateType, dt) if dt != StringType => DateSub(l, r)
           case _ => s
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
index 4be143c740..82229d7447 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
@@ -633,10 +633,15 @@ abstract class TypeCoercionBase {
       case d @ DateSub(TimestampType(), _) => d.copy(startDate = Cast(d.startDate, DateType))
       case d @ DateSub(StringType(), _) => d.copy(startDate = Cast(d.startDate, DateType))
 
-      case s @ SubtractTimestamps(DateType(), _, _, _) =>
-        s.copy(left = Cast(s.left, TimestampType))
-      case s @ SubtractTimestamps(_, DateType(), _, _) =>
-        s.copy(right = Cast(s.right, TimestampType))
+      case s @ SubtractTimestamps(DateType(), AnyTimestampType(), _, _) =>
+        s.copy(left = Cast(s.left, s.right.dataType))
+      case s @ SubtractTimestamps(AnyTimestampType(), DateType(), _, _) =>
+        s.copy(right = Cast(s.right, s.left.dataType))
+      case s @ SubtractTimestamps(AnyTimestampType(), AnyTimestampType(), _, _)
+          if s.left.dataType != s.right.dataType =>
+        val newLeft = castIfNotSameType(s.left, AnyTimestampType.defaultConcreteType)
+        val newRight = castIfNotSameType(s.right, AnyTimestampType.defaultConcreteType)
+        s.copy(left = newLeft, right = newRight)
 
       case t @ TimeAdd(StringType(), _, _) => t.copy(start = Cast(t.start, TimestampType))
     }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index d84b6ebee5..c0ae477405 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -2650,17 +2650,19 @@ case class SubtractTimestamps(
   def this(endTimestamp: Expression, startTimestamp: Expression) =
     this(endTimestamp, startTimestamp, SQLConf.get.legacyIntervalEnabled)
 
-  override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType, TimestampType)
+  override def inputTypes: Seq[AbstractDataType] = Seq(AnyTimestampType, AnyTimestampType)
   override def dataType: DataType =
     if (legacyInterval) CalendarIntervalType else DayTimeIntervalType()
 
   override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
     copy(timeZoneId = Option(timeZoneId))
 
+  @transient private lazy val zoneIdInEval: ZoneId = zoneIdForType(left.dataType)
+
   @transient
   private lazy val evalFunc: (Long, Long) => Any = legacyInterval match {
     case false => (leftMicros, rightMicros) =>
-      subtractTimestamps(leftMicros, rightMicros, zoneId)
+      subtractTimestamps(leftMicros, rightMicros, zoneIdInEval)
     case true => (leftMicros, rightMicros) =>
       new CalendarInterval(0, 0, leftMicros - rightMicros)
   }
@@ -2671,7 +2673,7 @@ case class SubtractTimestamps(
 
   override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = legacyInterval match {
     case false =>
-      val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName)
+      val zid = ctx.addReferenceObj("zoneId", zoneIdInEval, classOf[ZoneId].getName)
       val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
       defineCodeGen(ctx, ev, (l, r) => s"""$dtu.subtractTimestamps($l, $r, $zid)""")
     case true =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
index afa091d267..a718a7969b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
@@ -219,3 +219,14 @@ private[sql] abstract class FractionalType extends NumericType {
   private[sql] val fractional: Fractional[InternalType]
   private[sql] val asIntegral: Integral[InternalType]
 }
+
+private[sql] object AnyTimestampType extends AbstractDataType with Serializable {
+  override private[sql] def defaultConcreteType: DataType = TimestampWithoutTZType
+
+  override private[sql] def acceptsType(other: DataType): Boolean =
+    other.isInstanceOf[TimestampType] || other.isInstanceOf[TimestampWithoutTZType]
+
+  override private[sql] def simpleString = "(timestamp or timestamp without time zone)"
+
+  def unapply(e: Expression): Boolean = acceptsType(e.dataType)
+}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index 5719b1a723..e6fdb6a88a 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -1258,6 +1258,71 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     }
   }
 
+  test("SPARK-35916: timestamps without time zone difference") {
+    val end = LocalDateTime.parse("2019-10-04T11:04:01.123456")
+    val epoch = LocalDateTime.ofEpochSecond(0, 0, java.time.ZoneOffset.UTC)
+
+    outstandingTimezonesIds.foreach { tz =>
+      def sub(left: LocalDateTime, right: LocalDateTime): Expression = {
+        SubtractTimestamps(
+          Literal(left),
+          Literal(right),
+          legacyInterval = true,
+          timeZoneId = Some(tz))
+      }
+      checkEvaluation(sub(end, end), new CalendarInterval(0, 0, 0))
+      checkEvaluation(sub(end, epoch),
+        IntervalUtils.stringToInterval(UTF8String.fromString("interval " +
+          "436163 hours 4 minutes 1 seconds 123 milliseconds 456 microseconds")))
+      checkEvaluation(sub(epoch, end),
+        IntervalUtils.stringToInterval(UTF8String.fromString("interval " +
+          "-436163 hours -4 minutes -1 seconds -123 milliseconds -456 microseconds")))
+      checkEvaluation(
+        sub(
+          LocalDateTime.parse("9999-12-31T23:59:59.999999"),
+          LocalDateTime.parse("0001-01-01T00:00:00")),
+        IntervalUtils.stringToInterval(UTF8String.fromString("interval " +
+          "87649415 hours 59 minutes 59 seconds 999 milliseconds 999 microseconds")))
+    }
+
+    outstandingTimezonesIds.foreach { tz =>
+      def check(left: LocalDateTime, right: LocalDateTime): Unit = {
+        checkEvaluation(
+          SubtractTimestamps(
+            Literal(left),
+            Literal(right),
+            legacyInterval = false,
+            timeZoneId = Some(tz)),
+          Duration.between(
+            right.atZone(getZoneId(tz)).toLocalDateTime,
+            left.atZone(getZoneId(tz)).toLocalDateTime))
+      }
+
+      check(end, end)
+      check(end, epoch)
+      check(epoch, end)
+      check(LocalDateTime.parse("9999-12-31T23:59:59.999999"),
+        LocalDateTime.parse("0001-01-01T00:00:00"))
+
+      val errMsg = intercept[ArithmeticException] {
+        checkEvaluation(
+          SubtractTimestamps(
+            Literal(LocalDateTime.MIN),
+            Literal(LocalDateTime.MAX),
+            legacyInterval = false,
+            timeZoneId = Some(tz)),
+          Duration.ZERO)
+      }.getMessage
+      assert(errMsg.contains("overflow"))
+
+      Seq(false, true).foreach { legacy =>
+        checkConsistencyBetweenInterpretedAndCodegen(
+          (end: Expression, start: Expression) => SubtractTimestamps(end, start, legacy, Some(tz)),
+          TimestampWithoutTZType, TimestampWithoutTZType)
+      }
+    }
+  }
+
   test("SPARK-34896: subtract dates") {
     val end = LocalDate.of(2019, 10, 5)
     val epochDate = Literal(LocalDate.ofEpochDay(0))
diff --git a/sql/core/src/test/resources/sql-tests/inputs/datetime.sql b/sql/core/src/test/resources/sql-tests/inputs/datetime.sql
index d68c9ff299..bd2e7e4a8c 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/datetime.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/datetime.sql
@@ -78,6 +78,16 @@ select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01';
 select timestamp'2019-10-06 10:11:12.345678' - null;
 select null - timestamp'2019-10-06 10:11:12.345678';
 
+-- subtract timestamps without time zone
+select date'2020-01-01' - to_timestamp_ntz('2019-10-06 10:11:12.345678');
+select to_timestamp_ntz('2019-10-06 10:11:12.345678') - date'2020-01-01';
+select to_timestamp_ntz('2019-10-06 10:11:12.345678') - null;
+select null - to_timestamp_ntz('2019-10-06 10:11:12.345678');
+select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp_ntz('2019-10-06 10:11:12.345677');
+select to_timestamp_ntz('2019-10-06 10:11:12.345677') - to_timestamp_ntz('2019-10-07 10:11:12.345678');
+select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp('2019-10-06 10:11:12.345678');
+select to_timestamp('2019-10-06 10:11:12.345678') - to_timestamp_ntz('2019-10-07 10:11:12.345678');
+
 -- date add/sub
 select date_add('2011-11-11', 1Y);
 select date_add('2011-11-11', 1S);
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out
index 08b01ca0cb..86eb1f80d1 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 185
+-- Number of queries: 193
 
 
 -- !query
@@ -412,6 +412,70 @@ struct<(NULL - TIMESTAMP '2019-10-06 10:11:12.345678'):interval day to second>
 NULL
 
 
+-- !query
+select date'2020-01-01' - to_timestamp_ntz('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(DATE '2020-01-01' - to_timestamp_ntz(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+86 13:48:47.654322000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345678') - date'2020-01-01'
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345678) - DATE '2020-01-01'):interval day to second>
+-- !query output
+-86 13:48:47.654322000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345678') - null
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345678) - NULL):interval day to second>
+-- !query output
+NULL
+
+
+-- !query
+select null - to_timestamp_ntz('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(NULL - to_timestamp_ntz(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp_ntz('2019-10-06 10:11:12.345677')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-07 10:11:12.345678) - to_timestamp_ntz(2019-10-06 10:11:12.345677)):interval day to second>
+-- !query output
+1 00:00:00.000001000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345677') - to_timestamp_ntz('2019-10-07 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345677) - to_timestamp_ntz(2019-10-07 10:11:12.345678)):interval day to second>
+-- !query output
+-1 00:00:00.000001000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-07 10:11:12.345678) - to_timestamp(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+1 00:00:00.000000000
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.345678') - to_timestamp_ntz('2019-10-07 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp(2019-10-06 10:11:12.345678) - to_timestamp_ntz(2019-10-07 10:11:12.345678)):interval day to second>
+-- !query output
+-1 00:00:00.000000000
+
+
 -- !query
 select date_add('2011-11-11', 1Y)
 -- !query schema
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
index b4f53b8fdb..3ce5f02f68 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 185
+-- Number of queries: 193
 
 
 -- !query
@@ -389,6 +389,70 @@ struct<(NULL - TIMESTAMP '2019-10-06 10:11:12.345678'):interval day to second>
 NULL
 
 
+-- !query
+select date'2020-01-01' - to_timestamp_ntz('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(DATE '2020-01-01' - to_timestamp_ntz(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+86 13:48:47.654322000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345678') - date'2020-01-01'
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345678) - DATE '2020-01-01'):interval day to second>
+-- !query output
+-86 13:48:47.654322000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345678') - null
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345678) - NULL):interval day to second>
+-- !query output
+NULL
+
+
+-- !query
+select null - to_timestamp_ntz('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(NULL - to_timestamp_ntz(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp_ntz('2019-10-06 10:11:12.345677')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-07 10:11:12.345678) - to_timestamp_ntz(2019-10-06 10:11:12.345677)):interval day to second>
+-- !query output
+1 00:00:00.000001000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345677') - to_timestamp_ntz('2019-10-07 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345677) - to_timestamp_ntz(2019-10-07 10:11:12.345678)):interval day to second>
+-- !query output
+-1 00:00:00.000001000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-07 10:11:12.345678) - to_timestamp(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+1 00:00:00.000000000
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.345678') - to_timestamp_ntz('2019-10-07 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp(2019-10-06 10:11:12.345678) - to_timestamp_ntz(2019-10-07 10:11:12.345678)):interval day to second>
+-- !query output
+-1 00:00:00.000000000
+
+
 -- !query
 select date_add('2011-11-11', 1Y)
 -- !query schema
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
index 1cb46063c8..6c7115c11e 100755
--- a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 185
+-- Number of queries: 193
 
 
 -- !query
@@ -389,6 +389,70 @@ struct<(NULL - TIMESTAMP '2019-10-06 10:11:12.345678'):interval day to second>
 NULL
 
 
+-- !query
+select date'2020-01-01' - to_timestamp_ntz('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(DATE '2020-01-01' - to_timestamp_ntz(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+86 13:48:47.654322000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345678') - date'2020-01-01'
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345678) - DATE '2020-01-01'):interval day to second>
+-- !query output
+-86 13:48:47.654322000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345678') - null
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345678) - NULL):interval day to second>
+-- !query output
+NULL
+
+
+-- !query
+select null - to_timestamp_ntz('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(NULL - to_timestamp_ntz(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp_ntz('2019-10-06 10:11:12.345677')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-07 10:11:12.345678) - to_timestamp_ntz(2019-10-06 10:11:12.345677)):interval day to second>
+-- !query output
+1 00:00:00.000001000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345677') - to_timestamp_ntz('2019-10-07 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345677) - to_timestamp_ntz(2019-10-07 10:11:12.345678)):interval day to second>
+-- !query output
+-1 00:00:00.000001000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-07 10:11:12.345678) - to_timestamp(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+1 00:00:00.000000000
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.345678') - to_timestamp_ntz('2019-10-07 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp(2019-10-06 10:11:12.345678) - to_timestamp_ntz(2019-10-07 10:11:12.345678)):interval day to second>
+-- !query output
+-1 00:00:00.000000000
+
+
 -- !query
 select date_add('2011-11-11', 1Y)
 -- !query schema
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out
index 58c34aafa3..f046438b0f 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out
@@ -1008,7 +1008,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(3, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires timestamp type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7
 
 
 -- !query
@@ -1017,7 +1017,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(5, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires timestamp type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7
 
 
 -- !query
@@ -1026,7 +1026,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(10, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
 
 
 -- !query
@@ -1035,7 +1035,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(20, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires timestamp type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7
 
 
 -- !query
@@ -1408,7 +1408,7 @@ SELECT cast(1 as decimal(3, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7
+cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7
 
 
 -- !query
@@ -1417,7 +1417,7 @@ SELECT cast(1 as decimal(5, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7
+cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7
 
 
 -- !query
@@ -1426,7 +1426,7 @@ SELECT cast(1 as decimal(10, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
 
 
 -- !query
@@ -1435,7 +1435,7 @@ SELECT cast(1 as decimal(20, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7
+cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out
index f3e67ad36f..f8ba3e2bb9 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out
@@ -198,7 +198,7 @@ SELECT '1' - cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '('1' - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, ''1'' is of string type.; line 1 pos 7
+cannot resolve '('1' - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''1'' is of string type.; line 1 pos 7
 
 
 -- !query
@@ -781,7 +781,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - '1')' due to data type mismatch: argument 2 requires timestamp type, however, ''1'' is of string type.; line 1 pos 7
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - '1')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''1'' is of string type.; line 1 pos 7
 
 
 -- !query