[SPARK-36170][SQL] Change quoted interval literal (interval constructor) to be converted to ANSI interval types

### What changes were proposed in this pull request?

This PR changes quoted interval literals such as `SELECT INTERVAL '1 year 2 month'` so that they are converted to ANSI interval types.

### Why are the changes needed?

Unit-to-unit interval literals and unit list interval literals are converted to ANSI interval types, but quoted interval literals are still converted to CalendarIntervalType.

```
-- Unit list interval literals
spark-sql> select interval 1 year 2 month;
1-2
-- Quoted interval literals
spark-sql> select interval '1 year 2 month';
1 years 2 months
```
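
After this change, the quoted form produces the same ANSI interval as the unit list form. A sketch of the expected behavior (assuming a spark-shell session; exact output formatting may differ):
```
// Expected post-change behavior, illustrated in a spark-shell session.
val df = spark.sql("SELECT INTERVAL '1 year 2 month'")
df.printSchema()  // now an ANSI type: interval year to month (was CalendarIntervalType)
df.show()         // 1-2, matching the unit list literal above

// Mixing year-month and day-time units in one quoted literal is now a parse error:
// spark.sql("SELECT INTERVAL '1 year 2 second'")
//   => ParseException: Cannot mix year-month and day-time fields
```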

### Does this PR introduce _any_ user-facing change?

Yes, but the following note in `sql-migration-guide.md` seems to cover this change.
```
  - In Spark 3.2, the unit list interval literals can not mix year-month fields (YEAR and MONTH) and day-time fields (WEEK, DAY, ..., MICROSECOND).
For example, `INTERVAL 1 day 1 hour` is invalid in Spark 3.2. In Spark 3.1 and earlier,
there is no such limitation and the literal returns value of `CalendarIntervalType`.
To restore the behavior before Spark 3.2, you can set `spark.sql.legacy.interval.enabled` to `true`.
```
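
For completeness, a minimal sketch of restoring the legacy behavior with that flag (spark-shell session assumed):
```
// Restores the pre-3.2 behavior: quoted interval literals are parsed as
// CalendarIntervalType again when the legacy flag is enabled.
spark.conf.set("spark.sql.legacy.interval.enabled", "true")
spark.sql("SELECT INTERVAL '1 year 2 month'").printSchema()  // interval (CalendarIntervalType)
```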

### How was this patch tested?

Modified existing tests and added new tests.

Closes #33380 from sarutak/fix-interval-constructor.

Authored-by: Kousuke Saruta <sarutak@oss.nttdata.com>
Signed-off-by: Max Gekk <max.gekk@gmail.com>
Kousuke Saruta 2021-07-17 12:23:37 +03:00 committed by Max Gekk
parent 8009f0dd92
commit 71ea25d4f5
11 changed files with 284 additions and 139 deletions

View file

@@ -2165,7 +2165,15 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
ex.setStackTrace(e.getStackTrace)
throw ex
}
Literal(interval, CalendarIntervalType)
if (!conf.legacyIntervalEnabled) {
val units = value
.split("\\s")
.map(_.toLowerCase(Locale.ROOT).stripSuffix("s"))
.filter(s => s != "interval" && s.matches("[a-z]+"))
constructMultiUnitsIntervalLiteral(ctx, interval, units)
} else {
Literal(interval, CalendarIntervalType)
}
case "X" =>
val padding = if (value.length % 2 != 0) "0" else ""
Literal(DatatypeConverter.parseHexBinary(padding + value))
@@ -2372,6 +2380,44 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
UnresolvedTableOrView(visitMultipartIdentifier(ctx), commandName, allowTempView)
}
/**
* Construct a [[Literal]] from a [[CalendarInterval]] and
* units represented as a [[Seq]] of [[String]].
*/
private def constructMultiUnitsIntervalLiteral(
ctx: ParserRuleContext,
calendarInterval: CalendarInterval,
units: Seq[String]): Literal = {
val yearMonthFields = Set.empty[Byte]
val dayTimeFields = Set.empty[Byte]
for (unit <- units) {
if (YearMonthIntervalType.stringToField.contains(unit)) {
yearMonthFields += YearMonthIntervalType.stringToField(unit)
} else if (DayTimeIntervalType.stringToField.contains(unit)) {
dayTimeFields += DayTimeIntervalType.stringToField(unit)
} else if (unit == "week") {
dayTimeFields += DayTimeIntervalType.DAY
} else {
assert(unit == "millisecond" || unit == "microsecond")
dayTimeFields += DayTimeIntervalType.SECOND
}
}
if (yearMonthFields.nonEmpty) {
if (dayTimeFields.nonEmpty) {
val literalStr = source(ctx)
throw QueryParsingErrors.mixedIntervalUnitsError(literalStr, ctx)
}
Literal(
calendarInterval.months,
YearMonthIntervalType(yearMonthFields.min, yearMonthFields.max)
)
} else {
Literal(
IntervalUtils.getDuration(calendarInterval, TimeUnit.MICROSECONDS),
DayTimeIntervalType(dayTimeFields.min, dayTimeFields.max))
}
}
/**
* Create a [[CalendarInterval]] or ANSI interval literal expression.
* Two syntaxes are supported:
@@ -2402,33 +2448,8 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
} else if (ctx.errorCapturingMultiUnitsInterval != null && !conf.legacyIntervalEnabled) {
val units =
ctx.errorCapturingMultiUnitsInterval.body.unit.asScala.map(
_.getText.toLowerCase(Locale.ROOT).stripSuffix("s"))
val yearMonthFields = Set.empty[Byte]
val dayTimeFields = Set.empty[Byte]
for (unit <- units) {
if (YearMonthIntervalType.stringToField.contains(unit)) {
yearMonthFields += YearMonthIntervalType.stringToField(unit)
} else if (DayTimeIntervalType.stringToField.contains(unit)) {
dayTimeFields += DayTimeIntervalType.stringToField(unit)
} else if (unit == "week") {
dayTimeFields += DayTimeIntervalType.DAY
} else {
assert(unit == "millisecond" || unit == "microsecond")
dayTimeFields += DayTimeIntervalType.SECOND
}
}
if (yearMonthFields.nonEmpty) {
if (dayTimeFields.nonEmpty) {
val literalStr = source(ctx)
throw QueryParsingErrors.mixedIntervalUnitsError(literalStr, ctx)
}
Literal(
calendarInterval.months, YearMonthIntervalType(yearMonthFields.min, yearMonthFields.max))
} else {
Literal(
IntervalUtils.getDuration(calendarInterval, TimeUnit.MICROSECONDS),
DayTimeIntervalType(dayTimeFields.min, dayTimeFields.max))
}
_.getText.toLowerCase(Locale.ROOT).stripSuffix("s")).toSeq
constructMultiUnitsIntervalLiteral(ctx, calendarInterval, units)
} else {
Literal(calendarInterval, CalendarIntervalType)
}
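
The classification that `constructMultiUnitsIntervalLiteral` performs can be illustrated outside the parser. A simplified, self-contained sketch (the numeric field values and returned type names are stand-ins, not Spark's actual encodings):
```
import scala.collection.mutable

// Stand-ins for the YearMonthIntervalType/DayTimeIntervalType field bytes.
val yearMonthFieldOf = Map("year" -> 0, "month" -> 1)
val dayTimeFieldOf = Map("day" -> 0, "hour" -> 1, "minute" -> 2, "second" -> 3)

// Buckets each unit into a year-month or a day-time field, mirroring the helper:
// "week" folds into DAY, sub-second units fold into SECOND, and mixing the two
// groups is rejected. Assumes `units` is non-empty and contains only valid units.
def classify(units: Seq[String]): String = {
  val ym = mutable.Set.empty[Int]
  val dt = mutable.Set.empty[Int]
  for (unit <- units) {
    if (yearMonthFieldOf.contains(unit)) ym += yearMonthFieldOf(unit)
    else if (unit == "week") dt += dayTimeFieldOf("day")
    else if (unit == "millisecond" || unit == "microsecond") dt += dayTimeFieldOf("second")
    else dt += dayTimeFieldOf(unit)
  }
  if (ym.nonEmpty && dt.nonEmpty) "error: cannot mix year-month and day-time fields"
  else if (ym.nonEmpty) s"YearMonthIntervalType(${ym.min}, ${ym.max})"
  else s"DayTimeIntervalType(${dt.min}, ${dt.max})"
}

println(classify(Seq("year", "month")))       // YearMonthIntervalType(0, 1)
println(classify(Seq("week", "microsecond"))) // DayTimeIntervalType(0, 3)
println(classify(Seq("year", "second")))      // error: cannot mix ...
```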

View file

@@ -210,7 +210,7 @@ object QueryParsingErrors {
new ParseException(s"Intervals FROM $from TO $to are not supported.", ctx)
}
def mixedIntervalUnitsError(literal: String, ctx: IntervalContext): Throwable = {
def mixedIntervalUnitsError(literal: String, ctx: ParserRuleContext): Throwable = {
new ParseException(s"Cannot mix year-month and day-time fields: $literal", ctx)
}

View file

@@ -26,6 +26,7 @@ import org.apache.spark.sql.catalyst.expressions.{EqualTo, Hex, Literal}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.connector.catalog.TableChange.ColumnPosition.{after, first}
import org.apache.spark.sql.connector.expressions.{ApplyTransform, BucketTransform, DaysTransform, FieldReference, HoursTransform, IdentityTransform, LiteralValue, MonthsTransform, Transform, YearsTransform}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{IntegerType, LongType, StringType, StructType, TimestampType}
import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
@@ -2515,12 +2516,20 @@ class DDLParserSuite extends AnalysisTest {
val dateTypeSql = "INSERT INTO t PARTITION(part = date'2019-01-02') VALUES('a')"
val interval = new CalendarInterval(7, 1, 1000).toString
val intervalTypeSql = s"INSERT INTO t PARTITION(part = interval'$interval') VALUES('a')"
val ymIntervalTypeSql = "INSERT INTO t PARTITION(part = interval'1 year 2 month') VALUES('a')"
val dtIntervalTypeSql = "INSERT INTO t PARTITION(part = interval'1 day 2 hour " +
"3 minute 4.123456 second 5 millisecond 6 microsecond') VALUES('a')"
val timestamp = "2019-01-02 11:11:11"
val timestampTypeSql = s"INSERT INTO t PARTITION(part = timestamp'$timestamp') VALUES('a')"
val binaryTypeSql = s"INSERT INTO t PARTITION(part = X'$binaryHexStr') VALUES('a')"
comparePlans(parsePlan(dateTypeSql), insertPartitionPlan("2019-01-02"))
comparePlans(parsePlan(intervalTypeSql), insertPartitionPlan(interval))
withSQLConf(SQLConf.LEGACY_INTERVAL_ENABLED.key -> "true") {
comparePlans(parsePlan(intervalTypeSql), insertPartitionPlan(interval))
}
comparePlans(parsePlan(ymIntervalTypeSql), insertPartitionPlan("INTERVAL '1-2' YEAR TO MONTH"))
comparePlans(parsePlan(dtIntervalTypeSql),
insertPartitionPlan("INTERVAL '1 02:03:04.128462' DAY TO SECOND"))
comparePlans(parsePlan(timestampTypeSql), insertPartitionPlan(timestamp))
comparePlans(parsePlan(binaryTypeSql), insertPartitionPlan(binaryStr))
}

View file

@@ -491,18 +491,36 @@ class ExpressionParserSuite extends AnalysisTest {
checkTimestampNTZAndLTZ()
}
// Interval.
val intervalLiteral = Literal(IntervalUtils.stringToInterval("interval 3 month 1 hour"))
assertEqual("InterVal 'interval 3 month 1 hour'", intervalLiteral)
assertEqual("INTERVAL '3 month 1 hour'", intervalLiteral)
intercept("Interval 'interval 3 monthsss 1 hoursss'", "Cannot parse the INTERVAL value")
assertEqual(
"-interval '3 month 1 hour'",
UnaryMinus(Literal(IntervalUtils.stringToInterval("interval 3 month 1 hour"))))
val intervalStrWithAllUnits = "1 year 3 months 2 weeks 2 days 1 hour 3 minutes 2 seconds " +
"100 millisecond 200 microseconds"
assertEqual(
s"interval '$intervalStrWithAllUnits'",
Literal(IntervalUtils.stringToInterval(intervalStrWithAllUnits)))
val ymIntervalLiteral = Literal.create(Period.of(1, 2, 0), YearMonthIntervalType())
assertEqual("InterVal 'interval 1 year 2 month'", ymIntervalLiteral)
assertEqual("INTERVAL '1 year 2 month'", ymIntervalLiteral)
intercept("Interval 'interval 1 yearsss 2 monthsss'",
"Cannot parse the INTERVAL value: interval 1 yearsss 2 monthsss")
assertEqual("-interval '1 year 2 month'", UnaryMinus(ymIntervalLiteral))
val dtIntervalLiteral = Literal.create(
Duration.ofDays(1).plusHours(2).plusMinutes(3).plusSeconds(4).plusMillis(5).plusNanos(6000))
assertEqual("InterVal 'interval 1 day 2 hour 3 minute 4.005006 second'", dtIntervalLiteral)
assertEqual("INTERVAL '1 day 2 hour 3 minute 4.005006 second'", dtIntervalLiteral)
intercept("Interval 'interval 1 daysss 2 hoursss'",
"Cannot parse the INTERVAL value: interval 1 daysss 2 hoursss")
assertEqual("-interval '1 day 2 hour 3 minute 4.005006 second'", UnaryMinus(dtIntervalLiteral))
intercept("INTERVAL '1 year 2 second'",
"Cannot mix year-month and day-time fields: INTERVAL '1 year 2 second'")
withSQLConf(SQLConf.LEGACY_INTERVAL_ENABLED.key -> "true") {
val intervalLiteral = Literal(IntervalUtils.stringToInterval("interval 3 month 1 hour"))
assertEqual("InterVal 'interval 3 month 1 hour'", intervalLiteral)
assertEqual("INTERVAL '3 month 1 hour'", intervalLiteral)
intercept("Interval 'interval 3 monthsss 1 hoursss'", "Cannot parse the INTERVAL value")
assertEqual(
"-interval '3 month 1 hour'",
UnaryMinus(Literal(IntervalUtils.stringToInterval("interval 3 month 1 hour"))))
val intervalStrWithAllUnits = "1 year 3 months 2 weeks 2 days 1 hour 3 minutes 2 seconds " +
"100 millisecond 200 microseconds"
assertEqual(
s"interval '$intervalStrWithAllUnits'",
Literal(IntervalUtils.stringToInterval(intervalStrWithAllUnits)))
}
// Binary.
assertEqual("X'A'", Literal(Array(0x0a).map(_.toByte)))

View file

@@ -117,9 +117,10 @@ select map(1, interval 1 week, 2, interval 2 day);
select map(1, interval 2 millisecond, 3, interval 3 microsecond);
-- typed interval expression
select interval 'interval 3 year 1 hour';
select interval '3 year 1 hour';
SELECT interval '1 year 3 months 2 weeks 2 days 1 hour 3 minutes 2 seconds 100 millisecond 200 microseconds';
select interval 'interval 3 year 1 month';
select interval '3 year 1 month';
SELECT interval 'interval 2 weeks 2 days 1 hour 3 minutes 2 seconds 100 millisecond 200 microseconds';
SELECT interval '2 weeks 2 days 1 hour 3 minutes 2 seconds 100 millisecond 200 microseconds';
-- malformed interval literal
select interval;

View file

@@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 199
-- Number of queries: 200
-- !query
@@ -89,14 +89,14 @@ select interval '2 seconds' / 0
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
divide by zero
java.lang.ArithmeticException
/ by zero
-- !query
select interval '2 seconds' / null
-- !query schema
struct<divide_interval(INTERVAL '2 seconds', NULL):interval>
struct<(INTERVAL '02' SECOND / NULL):interval day to second>
-- !query output
NULL
@@ -104,7 +104,7 @@ NULL
-- !query
select interval '2 seconds' * null
-- !query schema
struct<multiply_interval(INTERVAL '2 seconds', NULL):interval>
struct<(INTERVAL '02' SECOND * NULL):interval day to second>
-- !query output
NULL
@@ -112,7 +112,7 @@ NULL
-- !query
select null * interval '2 seconds'
-- !query schema
struct<multiply_interval(INTERVAL '2 seconds', NULL):interval>
struct<(INTERVAL '02' SECOND * NULL):interval day to second>
-- !query output
NULL
@@ -120,25 +120,31 @@ NULL
-- !query
select -interval '-1 month 1 day -1 second'
-- !query schema
struct<(- INTERVAL '-1 months 1 days -1 seconds'):interval>
struct<>
-- !query output
1 months -1 days 1 seconds
org.apache.spark.sql.catalyst.parser.ParseException
Cannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)
== SQL ==
select -interval '-1 month 1 day -1 second'
--------^^^
-- !query
select -interval '-1 year 1 month'
-- !query schema
struct<(- INTERVAL '-11 months'):interval>
struct<(- INTERVAL '-0-11' YEAR TO MONTH):interval year to month>
-- !query output
11 months
0-11
-- !query
select -interval '-1 day 1 hour -1 minute 1 second'
-- !query schema
struct<(- INTERVAL '-1 days 59 minutes 1 seconds'):interval>
struct<(- INTERVAL '-0 23:00:59' DAY TO SECOND):interval day to second>
-- !query output
1 days -59 minutes -1 seconds
0 23:00:59.000000000
-- !query
@@ -174,25 +180,31 @@ struct<(- INTERVAL '-0 23:00:59' DAY TO SECOND):interval day to second>
-- !query
select +interval '-1 month 1 day -1 second'
-- !query schema
struct<(+ INTERVAL '-1 months 1 days -1 seconds'):interval>
struct<>
-- !query output
-1 months 1 days -1 seconds
org.apache.spark.sql.catalyst.parser.ParseException
Cannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)
== SQL ==
select +interval '-1 month 1 day -1 second'
--------^^^
-- !query
select +interval '-1 year 1 month'
-- !query schema
struct<(+ INTERVAL '-11 months'):interval>
struct<(+ INTERVAL '-0-11' YEAR TO MONTH):interval year to month>
-- !query output
-11 months
-0-11
-- !query
select +interval '-1 day 1 hour -1 minute 1 second'
-- !query schema
struct<(+ INTERVAL '-1 days 59 minutes 1 seconds'):interval>
struct<(+ INTERVAL '-0 23:00:59' DAY TO SECOND):interval day to second>
-- !query output
-1 days 59 minutes 1 seconds
-0 23:00:59.000000000
-- !query
@@ -837,27 +849,35 @@ struct<map(1, INTERVAL '00.002' SECOND, 3, INTERVAL '00.000003' SECOND):map<int,
-- !query
select interval 'interval 3 year 1 hour'
select interval 'interval 3 year 1 month'
-- !query schema
struct<INTERVAL '3 years 1 hours':interval>
struct<INTERVAL '3-1' YEAR TO MONTH:interval year to month>
-- !query output
3 years 1 hours
3-1
-- !query
select interval '3 year 1 hour'
select interval '3 year 1 month'
-- !query schema
struct<INTERVAL '3 years 1 hours':interval>
struct<INTERVAL '3-1' YEAR TO MONTH:interval year to month>
-- !query output
3 years 1 hours
3-1
-- !query
SELECT interval '1 year 3 months 2 weeks 2 days 1 hour 3 minutes 2 seconds 100 millisecond 200 microseconds'
SELECT interval 'interval 2 weeks 2 days 1 hour 3 minutes 2 seconds 100 millisecond 200 microseconds'
-- !query schema
struct<INTERVAL '1 years 3 months 16 days 1 hours 3 minutes 2.1002 seconds':interval>
struct<INTERVAL '16 01:03:02.1002' DAY TO SECOND:interval day to second>
-- !query output
1 years 3 months 16 days 1 hours 3 minutes 2.1002 seconds
16 01:03:02.100200000
-- !query
SELECT interval '2 weeks 2 days 1 hour 3 minutes 2 seconds 100 millisecond 200 microseconds'
-- !query schema
struct<INTERVAL '16 01:03:02.1002' DAY TO SECOND:interval day to second>
-- !query output
16 01:03:02.100200000
-- !query
@@ -1232,25 +1252,25 @@ struct<(INTERVAL '99 11:22:33.123456' DAY TO SECOND + INTERVAL '10 09:08:07.1234
-- !query
select interval '\t interval 1 day'
-- !query schema
struct<INTERVAL '1 days':interval>
struct<INTERVAL '1' DAY:interval day>
-- !query output
1 days
1 00:00:00.000000000
-- !query
select interval 'interval \t 1\tday'
-- !query schema
struct<INTERVAL '1 days':interval>
struct<INTERVAL '1' DAY:interval day>
-- !query output
1 days
1 00:00:00.000000000
-- !query
select interval 'interval\t1\tday'
-- !query schema
struct<INTERVAL '1 days':interval>
struct<INTERVAL '1' DAY:interval day>
-- !query output
1 days
1 00:00:00.000000000
-- !query
@@ -1389,7 +1409,7 @@ select a * 1.1 from values (interval '-2147483648 months', interval '2147483647
struct<>
-- !query output
java.lang.ArithmeticException
integer overflow
Overflow
-- !query
@@ -1398,7 +1418,7 @@ select a / 0.5 from values (interval '-2147483648 months', interval '2147483647
struct<>
-- !query output
java.lang.ArithmeticException
integer overflow
Overflow
-- !query

View file

@@ -451,9 +451,9 @@ cannot resolve '(+ TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch:
-- !query
select +interval '1 day'
-- !query schema
struct<(+ INTERVAL '1 days'):interval>
struct<(+ INTERVAL '1' DAY):interval day>
-- !query output
1 days
1 00:00:00.000000000
-- !query

View file

@@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 199
-- Number of queries: 200
-- !query
@@ -87,15 +87,16 @@ long overflow
-- !query
select interval '2 seconds' / 0
-- !query schema
struct<divide_interval(INTERVAL '2 seconds', 0):interval>
struct<>
-- !query output
NULL
java.lang.ArithmeticException
/ by zero
-- !query
select interval '2 seconds' / null
-- !query schema
struct<divide_interval(INTERVAL '2 seconds', NULL):interval>
struct<(INTERVAL '02' SECOND / NULL):interval day to second>
-- !query output
NULL
@@ -103,7 +104,7 @@ NULL
-- !query
select interval '2 seconds' * null
-- !query schema
struct<multiply_interval(INTERVAL '2 seconds', NULL):interval>
struct<(INTERVAL '02' SECOND * NULL):interval day to second>
-- !query output
NULL
@@ -111,7 +112,7 @@ NULL
-- !query
select null * interval '2 seconds'
-- !query schema
struct<multiply_interval(INTERVAL '2 seconds', NULL):interval>
struct<(INTERVAL '02' SECOND * NULL):interval day to second>
-- !query output
NULL
@@ -119,25 +120,31 @@ NULL
-- !query
select -interval '-1 month 1 day -1 second'
-- !query schema
struct<(- INTERVAL '-1 months 1 days -1 seconds'):interval>
struct<>
-- !query output
1 months -1 days 1 seconds
org.apache.spark.sql.catalyst.parser.ParseException
Cannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)
== SQL ==
select -interval '-1 month 1 day -1 second'
--------^^^
-- !query
select -interval '-1 year 1 month'
-- !query schema
struct<(- INTERVAL '-11 months'):interval>
struct<(- INTERVAL '-0-11' YEAR TO MONTH):interval year to month>
-- !query output
11 months
0-11
-- !query
select -interval '-1 day 1 hour -1 minute 1 second'
-- !query schema
struct<(- INTERVAL '-1 days 59 minutes 1 seconds'):interval>
struct<(- INTERVAL '-0 23:00:59' DAY TO SECOND):interval day to second>
-- !query output
1 days -59 minutes -1 seconds
0 23:00:59.000000000
-- !query
@@ -173,25 +180,31 @@ struct<(- INTERVAL '-0 23:00:59' DAY TO SECOND):interval day to second>
-- !query
select +interval '-1 month 1 day -1 second'
-- !query schema
struct<(+ INTERVAL '-1 months 1 days -1 seconds'):interval>
struct<>
-- !query output
-1 months 1 days -1 seconds
org.apache.spark.sql.catalyst.parser.ParseException
Cannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)
== SQL ==
select +interval '-1 month 1 day -1 second'
--------^^^
-- !query
select +interval '-1 year 1 month'
-- !query schema
struct<(+ INTERVAL '-11 months'):interval>
struct<(+ INTERVAL '-0-11' YEAR TO MONTH):interval year to month>
-- !query output
-11 months
-0-11
-- !query
select +interval '-1 day 1 hour -1 minute 1 second'
-- !query schema
struct<(+ INTERVAL '-1 days 59 minutes 1 seconds'):interval>
struct<(+ INTERVAL '-0 23:00:59' DAY TO SECOND):interval day to second>
-- !query output
-1 days 59 minutes 1 seconds
-0 23:00:59.000000000
-- !query
@@ -835,27 +848,35 @@ struct<map(1, INTERVAL '00.002' SECOND, 3, INTERVAL '00.000003' SECOND):map<int,
-- !query
select interval 'interval 3 year 1 hour'
select interval 'interval 3 year 1 month'
-- !query schema
struct<INTERVAL '3 years 1 hours':interval>
struct<INTERVAL '3-1' YEAR TO MONTH:interval year to month>
-- !query output
3 years 1 hours
3-1
-- !query
select interval '3 year 1 hour'
select interval '3 year 1 month'
-- !query schema
struct<INTERVAL '3 years 1 hours':interval>
struct<INTERVAL '3-1' YEAR TO MONTH:interval year to month>
-- !query output
3 years 1 hours
3-1
-- !query
SELECT interval '1 year 3 months 2 weeks 2 days 1 hour 3 minutes 2 seconds 100 millisecond 200 microseconds'
SELECT interval 'interval 2 weeks 2 days 1 hour 3 minutes 2 seconds 100 millisecond 200 microseconds'
-- !query schema
struct<INTERVAL '1 years 3 months 16 days 1 hours 3 minutes 2.1002 seconds':interval>
struct<INTERVAL '16 01:03:02.1002' DAY TO SECOND:interval day to second>
-- !query output
1 years 3 months 16 days 1 hours 3 minutes 2.1002 seconds
16 01:03:02.100200000
-- !query
SELECT interval '2 weeks 2 days 1 hour 3 minutes 2 seconds 100 millisecond 200 microseconds'
-- !query schema
struct<INTERVAL '16 01:03:02.1002' DAY TO SECOND:interval day to second>
-- !query output
16 01:03:02.100200000
-- !query
@@ -1230,25 +1251,25 @@ struct<(INTERVAL '99 11:22:33.123456' DAY TO SECOND + INTERVAL '10 09:08:07.1234
-- !query
select interval '\t interval 1 day'
-- !query schema
struct<INTERVAL '1 days':interval>
struct<INTERVAL '1' DAY:interval day>
-- !query output
1 days
1 00:00:00.000000000
-- !query
select interval 'interval \t 1\tday'
-- !query schema
struct<INTERVAL '1 days':interval>
struct<INTERVAL '1' DAY:interval day>
-- !query output
1 days
1 00:00:00.000000000
-- !query
select interval 'interval\t1\tday'
-- !query schema
struct<INTERVAL '1 days':interval>
struct<INTERVAL '1' DAY:interval day>
-- !query output
1 days
1 00:00:00.000000000
-- !query
@@ -1357,41 +1378,46 @@ select interval 'interval 1中文day'
-- !query
select -(a) from values (interval '-2147483648 months', interval '2147483647 months') t(a, b)
-- !query schema
struct<(- a):interval>
struct<>
-- !query output
-178956970 years -8 months
java.lang.ArithmeticException
integer overflow
-- !query
select a - b from values (interval '-2147483648 months', interval '2147483647 months') t(a, b)
-- !query schema
struct<(a - b):interval>
struct<>
-- !query output
1 months
java.lang.ArithmeticException
integer overflow
-- !query
select b + interval '1 month' from values (interval '-2147483648 months', interval '2147483647 months') t(a, b)
-- !query schema
struct<(b + INTERVAL '1 months'):interval>
struct<>
-- !query output
-178956970 years -8 months
java.lang.ArithmeticException
integer overflow
-- !query
select a * 1.1 from values (interval '-2147483648 months', interval '2147483647 months') t(a, b)
-- !query schema
struct<multiply_interval(a, 1.1):interval>
struct<>
-- !query output
-178956970 years -8 months
java.lang.ArithmeticException
Overflow
-- !query
select a / 0.5 from values (interval '-2147483648 months', interval '2147483647 months') t(a, b)
-- !query schema
struct<divide_interval(a, 0.5):interval>
struct<>
-- !query output
-178956970 years -8 months
java.lang.ArithmeticException
Overflow
-- !query

View file

@@ -451,9 +451,9 @@ cannot resolve '(+ TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch:
-- !query
select +interval '1 day'
-- !query schema
struct<(+ INTERVAL '1 days'):interval>
struct<(+ INTERVAL '1' DAY):interval day>
-- !query output
1 days
1 00:00:00.000000000
-- !query

View file

@@ -37,9 +37,9 @@ float double decimal(2,1)
-- !query
select typeof(date '1986-05-23'), typeof(timestamp '1986-05-23'), typeof(interval '23 days')
-- !query schema
struct<typeof(DATE '1986-05-23'):string,typeof(TIMESTAMP '1986-05-23 00:00:00'):string,typeof(INTERVAL '23 days'):string>
struct<typeof(DATE '1986-05-23'):string,typeof(TIMESTAMP '1986-05-23 00:00:00'):string,typeof(INTERVAL '23' DAY):string>
-- !query output
date timestamp interval
date timestamp interval day
-- !query

View file

@@ -297,14 +297,39 @@ class DateFunctionsSuite extends QueryTest with SharedSparkSession {
val d1 = Date.valueOf("2015-07-31")
val d2 = Date.valueOf("2015-12-31")
val i = new CalendarInterval(2, 2, 2000000L)
val day = "1 day"
val ym = "1 year 2 month"
val dt = "1 day 2 hour 3 minute 4 second 5 millisecond 6 microsecond"
val df = Seq((1, t1, d1), (3, t2, d2)).toDF("n", "t", "d")
checkAnswer(
df.selectExpr(s"d + INTERVAL'${i.toString}'"),
Seq(Row(Date.valueOf("2015-10-02")), Row(Date.valueOf("2016-03-02"))))
df.selectExpr(s"d + INTERVAL'$ym'"),
Seq(Row(Date.valueOf("2016-09-30")),
Row(Date.valueOf("2017-02-28"))))
checkAnswer(
df.selectExpr(s"t + INTERVAL'${i.toString}'"),
Seq(Row(Timestamp.valueOf("2015-10-03 00:00:01")),
Row(Timestamp.valueOf("2016-03-02 00:00:02"))))
df.selectExpr(s"t + INTERVAL'$ym'"),
Seq(Row(Timestamp.valueOf("2016-09-30 23:59:59")),
Row(Timestamp.valueOf("2017-02-28 00:00:00"))))
checkAnswer(
df.selectExpr(s"d + INTERVAL'$dt'"),
Seq(Row(Timestamp.valueOf("2015-08-01 02:03:04.005006")),
Row(Timestamp.valueOf("2016-01-01 02:03:04.005006"))))
checkAnswer(
df.selectExpr(s"d + INTERVAL '$day'"),
Seq(Row(Date.valueOf("2015-08-01")),
Row(Date.valueOf("2016-01-01"))))
checkAnswer(
df.selectExpr(s"t + INTERVAL'$dt'"),
Seq(Row(Timestamp.valueOf("2015-08-02 02:03:03.005006")),
Row(Timestamp.valueOf("2016-01-01 02:03:04.005006"))))
withSQLConf(SQLConf.LEGACY_INTERVAL_ENABLED.key -> "true") {
checkAnswer(
df.selectExpr(s"d + INTERVAL'${i.toString}'"),
Seq(Row(Date.valueOf("2015-10-02")), Row(Date.valueOf("2016-03-02"))))
checkAnswer(
df.selectExpr(s"t + INTERVAL'${i.toString}'"),
Seq(Row(Timestamp.valueOf("2015-10-03 00:00:01")),
Row(Timestamp.valueOf("2016-03-02 00:00:02"))))
}
}
test("time_sub") {
@@ -313,14 +338,39 @@ class DateFunctionsSuite extends QueryTest with SharedSparkSession {
val d1 = Date.valueOf("2015-09-30")
val d2 = Date.valueOf("2016-02-29")
val i = new CalendarInterval(2, 2, 2000000L)
val day = "1 day"
val ym = "1 year 2 month"
val dt = "1 day 2 hour 3 minute 4 second 5 millisecond 6 microsecond"
val df = Seq((1, t1, d1), (3, t2, d2)).toDF("n", "t", "d")
checkAnswer(
df.selectExpr(s"d - INTERVAL'${i.toString}'"),
Seq(Row(Date.valueOf("2015-07-27")), Row(Date.valueOf("2015-12-26"))))
df.selectExpr(s"d - INTERVAL'$ym'"),
Seq(Row(Date.valueOf("2014-07-30")),
Row(Date.valueOf("2014-12-29"))))
checkAnswer(
df.selectExpr(s"t - INTERVAL'${i.toString}'"),
Seq(Row(Timestamp.valueOf("2015-07-29 23:59:59")),
Row(Timestamp.valueOf("2015-12-27 00:00:00"))))
df.selectExpr(s"t - INTERVAL'$ym'"),
Seq(Row(Timestamp.valueOf("2014-08-01 00:00:01")),
Row(Timestamp.valueOf("2014-12-29 00:00:02"))))
checkAnswer(
df.selectExpr(s"d - INTERVAL'$dt'"),
Seq(Row(Timestamp.valueOf("2015-09-28 21:56:55.994994")),
Row(Timestamp.valueOf("2016-02-27 21:56:55.994994"))))
checkAnswer(
df.selectExpr(s"d - INTERVAL '$day'"),
Seq(Row(Date.valueOf("2015-09-29")),
Row(Date.valueOf("2016-02-28"))))
checkAnswer(
df.selectExpr(s"t - INTERVAL'$dt'"),
Seq(Row(Timestamp.valueOf("2015-09-29 21:56:56.994994")),
Row(Timestamp.valueOf("2016-02-27 21:56:57.994994"))))
withSQLConf(SQLConf.LEGACY_INTERVAL_ENABLED.key -> "true") {
checkAnswer(
df.selectExpr(s"d - INTERVAL'${i.toString}'"),
Seq(Row(Date.valueOf("2015-07-27")), Row(Date.valueOf("2015-12-26"))))
checkAnswer(
df.selectExpr(s"t - INTERVAL'${i.toString}'"),
Seq(Row(Timestamp.valueOf("2015-07-29 23:59:59")),
Row(Timestamp.valueOf("2015-12-27 00:00:00"))))
}
}
test("function add_months") {