[SPARK-35916][SQL] Support subtraction among Date/Timestamp/TimestampWithoutTZ

### What changes were proposed in this pull request?

Support the following operations:

- TimestampWithoutTZ - Date
- Date - TimestampWithoutTZ
- TimestampWithoutTZ - Timestamp
- Timestamp - TimestampWithoutTZ
- TimestampWithoutTZ - TimestampWithoutTZ

For subtraction between `TimestampWithoutTZ` and `Timestamp`, the `Timestamp` operand is cast to `TimestampWithoutTZType`.
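
For example, here are a few of the queries added to `datetime.sql` in this PR, with the expected results (taken from the updated golden files) shown as comments; each subtraction yields an `interval day to second`:

```sql
-- TimestampWithoutTZ - Date
select to_timestamp_ntz('2019-10-06 10:11:12.345678') - date'2020-01-01';
-- -86 13:48:47.654322000

-- TimestampWithoutTZ - TimestampWithoutTZ
select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp_ntz('2019-10-06 10:11:12.345677');
-- 1 00:00:00.000001000

-- TimestampWithoutTZ - Timestamp: the Timestamp operand is cast to TimestampWithoutTZType
select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp('2019-10-06 10:11:12.345678');
-- 1 00:00:00.000000000
```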

### Why are the changes needed?

Support basic subtraction among Date/Timestamp/TimestampWithoutTZ.

### Does this PR introduce _any_ user-facing change?

No, the timestamp without time zone type has not been released yet.

### How was this patch tested?

Unit tests

Closes #33115 from gengliangwang/subtractTimestampWithoutTz.

Authored-by: Gengliang Wang <gengliang@apache.org>
Signed-off-by: Gengliang Wang <gengliang@apache.org>
Gengliang Wang 2021-06-29 14:45:09 +08:00
parent 0a7a6f750c
commit 7635114d53
11 changed files with 307 additions and 22 deletions


@@ -386,8 +386,8 @@ class Analyzer(override val catalogManager: CatalogManager)
          DatetimeSub(l, r, DateAddInterval(l, UnaryMinus(r, f), ansiEnabled = f))
        case (_, CalendarIntervalType | _: DayTimeIntervalType) =>
          Cast(DatetimeSub(l, r, TimeAdd(l, UnaryMinus(r, f))), l.dataType)
-       case (TimestampType, _) => SubtractTimestamps(l, r)
-       case (_, TimestampType) => SubtractTimestamps(l, r)
+       case _ if AnyTimestampType.unapply(l) || AnyTimestampType.unapply(r) =>
+         SubtractTimestamps(l, r)
        case (_, DateType) => SubtractDates(l, r)
        case (DateType, dt) if dt != StringType => DateSub(l, r)
        case _ => s


@@ -633,10 +633,15 @@ abstract class TypeCoercionBase {
      case d @ DateSub(TimestampType(), _) => d.copy(startDate = Cast(d.startDate, DateType))
      case d @ DateSub(StringType(), _) => d.copy(startDate = Cast(d.startDate, DateType))
-     case s @ SubtractTimestamps(DateType(), _, _, _) =>
-       s.copy(left = Cast(s.left, TimestampType))
-     case s @ SubtractTimestamps(_, DateType(), _, _) =>
-       s.copy(right = Cast(s.right, TimestampType))
+     case s @ SubtractTimestamps(DateType(), AnyTimestampType(), _, _) =>
+       s.copy(left = Cast(s.left, s.right.dataType))
+     case s @ SubtractTimestamps(AnyTimestampType(), DateType(), _, _) =>
+       s.copy(right = Cast(s.right, s.left.dataType))
+     case s @ SubtractTimestamps(AnyTimestampType(), AnyTimestampType(), _, _)
+         if s.left.dataType != s.right.dataType =>
+       val newLeft = castIfNotSameType(s.left, AnyTimestampType.defaultConcreteType)
+       val newRight = castIfNotSameType(s.right, AnyTimestampType.defaultConcreteType)
+       s.copy(left = newLeft, right = newRight)
      case t @ TimeAdd(StringType(), _, _) => t.copy(start = Cast(t.start, TimestampType))
    }


@@ -2650,17 +2650,19 @@ case class SubtractTimestamps(
   def this(endTimestamp: Expression, startTimestamp: Expression) =
     this(endTimestamp, startTimestamp, SQLConf.get.legacyIntervalEnabled)
 
-  override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType, TimestampType)
+  override def inputTypes: Seq[AbstractDataType] = Seq(AnyTimestampType, AnyTimestampType)
 
   override def dataType: DataType =
     if (legacyInterval) CalendarIntervalType else DayTimeIntervalType()
 
   override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
     copy(timeZoneId = Option(timeZoneId))
 
+  @transient private lazy val zoneIdInEval: ZoneId = zoneIdForType(left.dataType)
+
   @transient
   private lazy val evalFunc: (Long, Long) => Any = legacyInterval match {
     case false => (leftMicros, rightMicros) =>
-      subtractTimestamps(leftMicros, rightMicros, zoneId)
+      subtractTimestamps(leftMicros, rightMicros, zoneIdInEval)
     case true => (leftMicros, rightMicros) =>
       new CalendarInterval(0, 0, leftMicros - rightMicros)
   }
@@ -2671,7 +2673,7 @@
   override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = legacyInterval match {
     case false =>
-      val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName)
+      val zid = ctx.addReferenceObj("zoneId", zoneIdInEval, classOf[ZoneId].getName)
       val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
       defineCodeGen(ctx, ev, (l, r) => s"""$dtu.subtractTimestamps($l, $r, $zid)""")
     case true =>


@@ -219,3 +219,14 @@ private[sql] abstract class FractionalType extends NumericType {
   private[sql] val fractional: Fractional[InternalType]
   private[sql] val asIntegral: Integral[InternalType]
 }
+
+private[sql] object AnyTimestampType extends AbstractDataType with Serializable {
+  override private[sql] def defaultConcreteType: DataType = TimestampWithoutTZType
+
+  override private[sql] def acceptsType(other: DataType): Boolean =
+    other.isInstanceOf[TimestampType] || other.isInstanceOf[TimestampWithoutTZType]
+
+  override private[sql] def simpleString = "(timestamp or timestamp without time zone)"
+
+  def unapply(e: Expression): Boolean = acceptsType(e.dataType)
+}


@@ -1258,6 +1258,71 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     }
   }
 
+  test("SPARK-35916: timestamps without time zone difference") {
+    val end = LocalDateTime.parse("2019-10-04T11:04:01.123456")
+    val epoch = LocalDateTime.ofEpochSecond(0, 0, java.time.ZoneOffset.UTC)
+
+    outstandingTimezonesIds.foreach { tz =>
+      def sub(left: LocalDateTime, right: LocalDateTime): Expression = {
+        SubtractTimestamps(
+          Literal(left),
+          Literal(right),
+          legacyInterval = true,
+          timeZoneId = Some(tz))
+      }
+
+      checkEvaluation(sub(end, end), new CalendarInterval(0, 0, 0))
+      checkEvaluation(sub(end, epoch),
+        IntervalUtils.stringToInterval(UTF8String.fromString("interval " +
+          "436163 hours 4 minutes 1 seconds 123 milliseconds 456 microseconds")))
+      checkEvaluation(sub(epoch, end),
+        IntervalUtils.stringToInterval(UTF8String.fromString("interval " +
+          "-436163 hours -4 minutes -1 seconds -123 milliseconds -456 microseconds")))
+      checkEvaluation(
+        sub(
+          LocalDateTime.parse("9999-12-31T23:59:59.999999"),
+          LocalDateTime.parse("0001-01-01T00:00:00")),
+        IntervalUtils.stringToInterval(UTF8String.fromString("interval " +
+          "87649415 hours 59 minutes 59 seconds 999 milliseconds 999 microseconds")))
+    }
+
+    outstandingTimezonesIds.foreach { tz =>
+      def check(left: LocalDateTime, right: LocalDateTime): Unit = {
+        checkEvaluation(
+          SubtractTimestamps(
+            Literal(left),
+            Literal(right),
+            legacyInterval = false,
+            timeZoneId = Some(tz)),
+          Duration.between(
+            right.atZone(getZoneId(tz)).toLocalDateTime,
+            left.atZone(getZoneId(tz)).toLocalDateTime))
+      }
+      check(end, end)
+      check(end, epoch)
+      check(epoch, end)
+      check(LocalDateTime.parse("9999-12-31T23:59:59.999999"),
+        LocalDateTime.parse("0001-01-01T00:00:00"))
+
+      val errMsg = intercept[ArithmeticException] {
+        checkEvaluation(
+          SubtractTimestamps(
+            Literal(LocalDateTime.MIN),
+            Literal(LocalDateTime.MAX),
+            legacyInterval = false,
+            timeZoneId = Some(tz)),
+          Duration.ZERO)
+      }.getMessage
+      assert(errMsg.contains("overflow"))
+
+      Seq(false, true).foreach { legacy =>
+        checkConsistencyBetweenInterpretedAndCodegen(
+          (end: Expression, start: Expression) => SubtractTimestamps(end, start, legacy, Some(tz)),
+          TimestampWithoutTZType, TimestampWithoutTZType)
+      }
+    }
+  }
+
   test("SPARK-34896: subtract dates") {
     val end = LocalDate.of(2019, 10, 5)
     val epochDate = Literal(LocalDate.ofEpochDay(0))


@@ -78,6 +78,16 @@ select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01';
 select timestamp'2019-10-06 10:11:12.345678' - null;
 select null - timestamp'2019-10-06 10:11:12.345678';
 
+-- subtract timestamps without time zone
+select date'2020-01-01' - to_timestamp_ntz('2019-10-06 10:11:12.345678');
+select to_timestamp_ntz('2019-10-06 10:11:12.345678') - date'2020-01-01';
+select to_timestamp_ntz('2019-10-06 10:11:12.345678') - null;
+select null - to_timestamp_ntz('2019-10-06 10:11:12.345678');
+select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp_ntz('2019-10-06 10:11:12.345677');
+select to_timestamp_ntz('2019-10-06 10:11:12.345677') - to_timestamp_ntz('2019-10-07 10:11:12.345678');
+select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp('2019-10-06 10:11:12.345678');
+select to_timestamp('2019-10-06 10:11:12.345678') - to_timestamp_ntz('2019-10-07 10:11:12.345678');
+
 -- date add/sub
 select date_add('2011-11-11', 1Y);
 select date_add('2011-11-11', 1S);


@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 185
+-- Number of queries: 193
 
 
 -- !query
@@ -412,6 +412,70 @@ struct<(NULL - TIMESTAMP '2019-10-06 10:11:12.345678'):interval day to second>
 NULL
 
 
+-- !query
+select date'2020-01-01' - to_timestamp_ntz('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(DATE '2020-01-01' - to_timestamp_ntz(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+86 13:48:47.654322000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345678') - date'2020-01-01'
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345678) - DATE '2020-01-01'):interval day to second>
+-- !query output
+-86 13:48:47.654322000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345678') - null
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345678) - NULL):interval day to second>
+-- !query output
+NULL
+
+
+-- !query
+select null - to_timestamp_ntz('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(NULL - to_timestamp_ntz(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp_ntz('2019-10-06 10:11:12.345677')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-07 10:11:12.345678) - to_timestamp_ntz(2019-10-06 10:11:12.345677)):interval day to second>
+-- !query output
+1 00:00:00.000001000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345677') - to_timestamp_ntz('2019-10-07 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345677) - to_timestamp_ntz(2019-10-07 10:11:12.345678)):interval day to second>
+-- !query output
+-1 00:00:00.000001000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-07 10:11:12.345678) - to_timestamp(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+1 00:00:00.000000000
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.345678') - to_timestamp_ntz('2019-10-07 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp(2019-10-06 10:11:12.345678) - to_timestamp_ntz(2019-10-07 10:11:12.345678)):interval day to second>
+-- !query output
+-1 00:00:00.000000000
+
+
 -- !query
 select date_add('2011-11-11', 1Y)
 -- !query schema


@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 185
+-- Number of queries: 193
 
 
 -- !query
@@ -389,6 +389,70 @@ struct<(NULL - TIMESTAMP '2019-10-06 10:11:12.345678'):interval day to second>
 NULL
 
 
+-- !query
+select date'2020-01-01' - to_timestamp_ntz('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(DATE '2020-01-01' - to_timestamp_ntz(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+86 13:48:47.654322000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345678') - date'2020-01-01'
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345678) - DATE '2020-01-01'):interval day to second>
+-- !query output
+-86 13:48:47.654322000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345678') - null
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345678) - NULL):interval day to second>
+-- !query output
+NULL
+
+
+-- !query
+select null - to_timestamp_ntz('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(NULL - to_timestamp_ntz(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp_ntz('2019-10-06 10:11:12.345677')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-07 10:11:12.345678) - to_timestamp_ntz(2019-10-06 10:11:12.345677)):interval day to second>
+-- !query output
+1 00:00:00.000001000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345677') - to_timestamp_ntz('2019-10-07 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345677) - to_timestamp_ntz(2019-10-07 10:11:12.345678)):interval day to second>
+-- !query output
+-1 00:00:00.000001000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-07 10:11:12.345678) - to_timestamp(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+1 00:00:00.000000000
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.345678') - to_timestamp_ntz('2019-10-07 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp(2019-10-06 10:11:12.345678) - to_timestamp_ntz(2019-10-07 10:11:12.345678)):interval day to second>
+-- !query output
+-1 00:00:00.000000000
+
+
 -- !query
 select date_add('2011-11-11', 1Y)
 -- !query schema


@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 185
+-- Number of queries: 193
 
 
 -- !query
@@ -389,6 +389,70 @@ struct<(NULL - TIMESTAMP '2019-10-06 10:11:12.345678'):interval day to second>
 NULL
 
 
+-- !query
+select date'2020-01-01' - to_timestamp_ntz('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(DATE '2020-01-01' - to_timestamp_ntz(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+86 13:48:47.654322000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345678') - date'2020-01-01'
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345678) - DATE '2020-01-01'):interval day to second>
+-- !query output
+-86 13:48:47.654322000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345678') - null
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345678) - NULL):interval day to second>
+-- !query output
+NULL
+
+
+-- !query
+select null - to_timestamp_ntz('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(NULL - to_timestamp_ntz(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp_ntz('2019-10-06 10:11:12.345677')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-07 10:11:12.345678) - to_timestamp_ntz(2019-10-06 10:11:12.345677)):interval day to second>
+-- !query output
+1 00:00:00.000001000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345677') - to_timestamp_ntz('2019-10-07 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345677) - to_timestamp_ntz(2019-10-07 10:11:12.345678)):interval day to second>
+-- !query output
+-1 00:00:00.000001000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-07 10:11:12.345678) - to_timestamp(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+1 00:00:00.000000000
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.345678') - to_timestamp_ntz('2019-10-07 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp(2019-10-06 10:11:12.345678) - to_timestamp_ntz(2019-10-07 10:11:12.345678)):interval day to second>
+-- !query output
+-1 00:00:00.000000000
+
+
 -- !query
 select date_add('2011-11-11', 1Y)
 -- !query schema


@@ -1008,7 +1008,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(3, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires timestamp type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7
 
 
 -- !query
@@ -1017,7 +1017,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(5, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires timestamp type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7
 
 
 -- !query
@@ -1026,7 +1026,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(10, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
 
 
 -- !query
@@ -1035,7 +1035,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(20, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires timestamp type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7
 
 
 -- !query
@@ -1408,7 +1408,7 @@ SELECT cast(1 as decimal(3, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7
+cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7
 
 
 -- !query
@@ -1417,7 +1417,7 @@ SELECT cast(1 as decimal(5, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7
+cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7
 
 
 -- !query
@@ -1426,7 +1426,7 @@ SELECT cast(1 as decimal(10, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
 
 
 -- !query
@@ -1435,7 +1435,7 @@ SELECT cast(1 as decimal(20, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7
+cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7
 
 
 -- !query


@@ -198,7 +198,7 @@ SELECT '1' - cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '('1' - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, ''1'' is of string type.; line 1 pos 7
+cannot resolve '('1' - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''1'' is of string type.; line 1 pos 7
 
 
 -- !query
@@ -781,7 +781,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - '1')' due to data type mismatch: argument 2 requires timestamp type, however, ''1'' is of string type.; line 1 pos 7
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - '1')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''1'' is of string type.; line 1 pos 7
 
 
 -- !query