[SPARK-29761][SQL] do not output leading 'interval' in CalendarInterval.toString

### What changes were proposed in this pull request?

Remove the leading "interval" in `CalendarInterval.toString`.

### Why are the changes needed?

Although it's allowed to have an "interval" prefix when casting a string to an interval, it's not recommended.

This is also consistent with pgsql:
```
cloud0fan=# select interval '1' day;
 interval
----------
 1 day
(1 row)
```
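For comparison, the same query through the DataFrame API after this change (a sketch; the expected value comes from the golden files updated in this PR):

```scala
// Matches the pgsql style above: no leading "interval" (Spark always pluralizes the unit).
spark.sql("select interval '1' day").first().get(0).toString
// "1 days"   (before this change: "interval 1 days")
```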

### Does this PR introduce any user-facing change?

Yes. When displaying a dataframe with an interval-type column, the result is different; for example:
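A minimal spark-shell sketch (assuming a build that includes this patch; the expected strings come from the updated golden files in this PR):

```scala
// The displayed interval value no longer carries the "interval" prefix.
val df = spark.sql("SELECT interval '1' day AS i")
df.show()
// Spark 2.4 and earlier showed the cell as:  interval 1 days
// With this patch it shows:                  1 days
```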

### How was this patch tested?

Updated tests.

Closes #26401 from cloud-fan/interval.

Authored-by: Wenchen Fan <wenchen@databricks.com>
Signed-off-by: Wenchen Fan <wenchen@databricks.com>
Wenchen Fan 2019-11-07 15:44:50 +08:00
parent 29dc59ac29
commit 9b61f90987
19 changed files with 221 additions and 216 deletions


@ -84,35 +84,38 @@ public final class CalendarInterval implements Serializable {
@Override
public String toString() {
StringBuilder sb = new StringBuilder("interval");
if (months != 0) {
appendUnit(sb, months / 12, "year");
appendUnit(sb, months % 12, "month");
if (months == 0 && days == 0 && microseconds == 0) {
return "0 seconds";
}
appendUnit(sb, days, "day");
StringBuilder sb = new StringBuilder();
if (months != 0) {
appendUnit(sb, months / 12, "years");
appendUnit(sb, months % 12, "months");
}
appendUnit(sb, days, "days");
if (microseconds != 0) {
long rest = microseconds;
appendUnit(sb, rest / MICROS_PER_HOUR, "hour");
appendUnit(sb, rest / MICROS_PER_HOUR, "hours");
rest %= MICROS_PER_HOUR;
appendUnit(sb, rest / MICROS_PER_MINUTE, "minute");
appendUnit(sb, rest / MICROS_PER_MINUTE, "minutes");
rest %= MICROS_PER_MINUTE;
if (rest != 0) {
String s = BigDecimal.valueOf(rest, 6).stripTrailingZeros().toPlainString();
sb.append(' ').append(s).append(" seconds");
sb.append(s).append(" seconds ");
}
} else if (months == 0 && days == 0) {
sb.append(" 0 microseconds");
}
sb.setLength(sb.length() - 1);
return sb.toString();
}
private void appendUnit(StringBuilder sb, long value, String unit) {
if (value != 0) {
sb.append(' ').append(value).append(' ').append(unit).append('s');
sb.append(value).append(' ').append(unit).append(' ');
}
}
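For reference, a short illustrative sketch of the new behavior from the Scala side, based on the cases in the updated `CalendarIntervalSuite` below (constructor and constants as used in that suite):

```scala
import org.apache.spark.unsafe.types.CalendarInterval
import org.apache.spark.unsafe.types.CalendarInterval.{MICROS_PER_HOUR, MICROS_PER_MINUTE}

// The all-zero interval now short-circuits to "0 seconds"
// instead of the old "interval 0 microseconds".
new CalendarInterval(0, 0, 0).toString
// "0 seconds"

// Non-zero fields are rendered without the leading "interval";
// units are passed to appendUnit already in plural form.
new CalendarInterval(34, 31, 3 * MICROS_PER_HOUR + 13 * MICROS_PER_MINUTE + 123).toString
// "2 years 10 months 31 days 3 hours 13 minutes 0.000123 seconds"
```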


@ -51,28 +51,28 @@ public class CalendarIntervalSuite {
CalendarInterval i;
i = new CalendarInterval(0, 0, 0);
assertEquals("interval 0 microseconds", i.toString());
assertEquals("0 seconds", i.toString());
i = new CalendarInterval(34, 0, 0);
assertEquals("interval 2 years 10 months", i.toString());
assertEquals("2 years 10 months", i.toString());
i = new CalendarInterval(-34, 0, 0);
assertEquals("interval -2 years -10 months", i.toString());
assertEquals("-2 years -10 months", i.toString());
i = new CalendarInterval(0, 31, 0);
assertEquals("interval 31 days", i.toString());
assertEquals("31 days", i.toString());
i = new CalendarInterval(0, -31, 0);
assertEquals("interval -31 days", i.toString());
assertEquals("-31 days", i.toString());
i = new CalendarInterval(0, 0, 3 * MICROS_PER_HOUR + 13 * MICROS_PER_MINUTE + 123);
assertEquals("interval 3 hours 13 minutes 0.000123 seconds", i.toString());
assertEquals("3 hours 13 minutes 0.000123 seconds", i.toString());
i = new CalendarInterval(0, 0, -3 * MICROS_PER_HOUR - 13 * MICROS_PER_MINUTE - 123);
assertEquals("interval -3 hours -13 minutes -0.000123 seconds", i.toString());
assertEquals("-3 hours -13 minutes -0.000123 seconds", i.toString());
i = new CalendarInterval(34, 31, 3 * MICROS_PER_HOUR + 13 * MICROS_PER_MINUTE + 123);
assertEquals("interval 2 years 10 months 31 days 3 hours 13 minutes 0.000123 seconds",
assertEquals("2 years 10 months 31 days 3 hours 13 minutes 0.000123 seconds",
i.toString());
}


@ -222,6 +222,8 @@ license: |
- Since Spark 3.0, the interval literal syntax does not allow multiple from-to units anymore. For example, `SELECT INTERVAL '1-1' YEAR TO MONTH '2-2' YEAR TO MONTH'` throws parser exception.
- Since Spark 3.0, when casting interval values to string type, there is no "interval" prefix, e.g. `1 days 2 hours`. In Spark version 2.4 and earlier, the string contains the "interval" prefix like `interval 1 days 2 hours`.
## Upgrading from Spark SQL 2.4 to 2.4.1
- The value of `spark.executor.heartbeatInterval`, when specified without units like "30" rather than "30s", was


@ -34,7 +34,7 @@ import org.apache.spark.sql.types._
> SELECT _FUNC_(col) FROM VALUES (NULL), (NULL) AS tab(col);
NULL
> SELECT _FUNC_(cast(col as interval)) FROM VALUES ('1 seconds'), ('2 seconds'), (null) tab(col);
interval 3 seconds
3 seconds
""",
since = "1.0.0")
// scalastyle:on line.size.limit


@ -669,7 +669,7 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {
checkEvaluation(Cast(Literal.create(
new CalendarInterval(15, 9, -3 * CalendarInterval.MICROS_PER_HOUR), CalendarIntervalType),
StringType),
"interval 1 years 3 months 9 days -3 hours")
"1 years 3 months 9 days -3 hours")
checkEvaluation(Cast(Literal("INTERVAL 1 Second 1 microsecond"), CalendarIntervalType),
new CalendarInterval(0, 0, 1000001))
checkEvaluation(Cast(Literal("1 MONTH 1 Microsecond"), CalendarIntervalType),


@ -169,12 +169,12 @@ class ExpressionSQLBuilderSuite extends SparkFunSuite {
checkSQL(
TimeAdd('a, interval),
"`a` + interval 1 hours"
"`a` + 1 hours"
)
checkSQL(
TimeSub('a, interval),
"`a` - interval 1 hours"
"`a` - 1 hours"
)
}
}


@ -25,9 +25,9 @@ select
'1' year,
2 years
-- !query 1 schema
struct<interval 1 seconds:interval,interval 2 seconds:interval,interval 1 minutes:interval,interval 2 minutes:interval,interval 1 hours:interval,interval 2 hours:interval,interval 1 days:interval,interval 2 days:interval,interval 1 months:interval,interval 2 months:interval,interval 1 years:interval,interval 2 years:interval>
struct<1 seconds:interval,2 seconds:interval,1 minutes:interval,2 minutes:interval,1 hours:interval,2 hours:interval,1 days:interval,2 days:interval,1 months:interval,2 months:interval,1 years:interval,2 years:interval>
-- !query 1 output
interval 1 seconds interval 2 seconds interval 1 minutes interval 2 minutes interval 1 hours interval 2 hours interval 1 days interval 2 days interval 1 months interval 2 months interval 1 years interval 2 years
1 seconds 2 seconds 1 minutes 2 minutes 1 hours 2 hours 1 days 2 days 1 months 2 months 1 years 2 years
-- !query 2
@ -36,9 +36,9 @@ select
interval '10' year,
interval '11' month
-- !query 2 schema
struct<interval 10 years 11 months:interval,interval 10 years:interval,interval 11 months:interval>
struct<10 years 11 months:interval,10 years:interval,11 months:interval>
-- !query 2 output
interval 10 years 11 months interval 10 years interval 11 months
10 years 11 months 10 years 11 months
-- !query 3
@ -47,9 +47,9 @@ select
'10' year,
'11' month
-- !query 3 schema
struct<interval 10 years 11 months:interval,interval 10 years:interval,interval 11 months:interval>
struct<10 years 11 months:interval,10 years:interval,11 months:interval>
-- !query 3 output
interval 10 years 11 months interval 10 years interval 11 months
10 years 11 months 10 years 11 months
-- !query 4
@ -61,9 +61,9 @@ select
interval '13' second,
interval '13.123456789' second
-- !query 4 schema
struct<interval 10 days 9 hours 8 minutes 7.987654 seconds:interval,interval 10 days:interval,interval 11 hours:interval,interval 12 minutes:interval,interval 13 seconds:interval,interval 13.123456 seconds:interval>
struct<10 days 9 hours 8 minutes 7.987654 seconds:interval,10 days:interval,11 hours:interval,12 minutes:interval,13 seconds:interval,13.123456 seconds:interval>
-- !query 4 output
interval 10 days 9 hours 8 minutes 7.987654 seconds interval 10 days interval 11 hours interval 12 minutes interval 13 seconds interval 13.123456 seconds
10 days 9 hours 8 minutes 7.987654 seconds 10 days 11 hours 12 minutes 13 seconds 13.123456 seconds
-- !query 5
@ -75,25 +75,25 @@ select
'13' second,
'13.123456789' second
-- !query 5 schema
struct<interval 10 days 9 hours 8 minutes 7.987654 seconds:interval,interval 10 days:interval,interval 11 hours:interval,interval 12 minutes:interval,interval 13 seconds:interval,interval 13.123456 seconds:interval>
struct<10 days 9 hours 8 minutes 7.987654 seconds:interval,10 days:interval,11 hours:interval,12 minutes:interval,13 seconds:interval,13.123456 seconds:interval>
-- !query 5 output
interval 10 days 9 hours 8 minutes 7.987654 seconds interval 10 days interval 11 hours interval 12 minutes interval 13 seconds interval 13.123456 seconds
10 days 9 hours 8 minutes 7.987654 seconds 10 days 11 hours 12 minutes 13 seconds 13.123456 seconds
-- !query 6
select map(1, interval 1 day, 2, interval 3 week)
-- !query 6 schema
struct<map(1, interval 1 days, 2, interval 21 days):map<int,interval>>
struct<map(1, 1 days, 2, 21 days):map<int,interval>>
-- !query 6 output
{1:interval 1 days,2:interval 21 days}
{1:1 days,2:21 days}
-- !query 7
select map(1, 1 day, 2, 3 week)
-- !query 7 schema
struct<map(1, interval 1 days, 2, interval 21 days):map<int,interval>>
struct<map(1, 1 days, 2, 21 days):map<int,interval>>
-- !query 7 output
{1:interval 1 days,2:interval 21 days}
{1:1 days,2:21 days}
-- !query 8
@ -118,7 +118,7 @@ select
interval '2-2' year to month + dateval
from interval_arithmetic
-- !query 9 schema
struct<dateval:date,CAST(CAST(dateval AS TIMESTAMP) - interval 2 years 2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) - interval -2 years -2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval 2 years 2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval -2 years -2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + (- interval 2 years 2 months) AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval 2 years 2 months AS DATE):date>
struct<dateval:date,CAST(CAST(dateval AS TIMESTAMP) - 2 years 2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) - -2 years -2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + 2 years 2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + -2 years -2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + (- 2 years 2 months) AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + 2 years 2 months AS DATE):date>
-- !query 9 output
2012-01-01 2009-11-01 2014-03-01 2014-03-01 2009-11-01 2009-11-01 2014-03-01
@ -134,7 +134,7 @@ select
'2-2' year to month + dateval
from interval_arithmetic
-- !query 10 schema
struct<dateval:date,CAST(CAST(dateval AS TIMESTAMP) - interval 2 years 2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) - interval -2 years -2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval 2 years 2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval -2 years -2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + (- interval 2 years 2 months) AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval 2 years 2 months AS DATE):date>
struct<dateval:date,CAST(CAST(dateval AS TIMESTAMP) - 2 years 2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) - -2 years -2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + 2 years 2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + -2 years -2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + (- 2 years 2 months) AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + 2 years 2 months AS DATE):date>
-- !query 10 output
2012-01-01 2009-11-01 2014-03-01 2014-03-01 2009-11-01 2009-11-01 2014-03-01
@ -150,7 +150,7 @@ select
interval '2-2' year to month + tsval
from interval_arithmetic
-- !query 11 schema
struct<tsval:timestamp,CAST(tsval - interval 2 years 2 months AS TIMESTAMP):timestamp,CAST(tsval - interval -2 years -2 months AS TIMESTAMP):timestamp,CAST(tsval + interval 2 years 2 months AS TIMESTAMP):timestamp,CAST(tsval + interval -2 years -2 months AS TIMESTAMP):timestamp,CAST(tsval + (- interval 2 years 2 months) AS TIMESTAMP):timestamp,CAST(tsval + interval 2 years 2 months AS TIMESTAMP):timestamp>
struct<tsval:timestamp,CAST(tsval - 2 years 2 months AS TIMESTAMP):timestamp,CAST(tsval - -2 years -2 months AS TIMESTAMP):timestamp,CAST(tsval + 2 years 2 months AS TIMESTAMP):timestamp,CAST(tsval + -2 years -2 months AS TIMESTAMP):timestamp,CAST(tsval + (- 2 years 2 months) AS TIMESTAMP):timestamp,CAST(tsval + 2 years 2 months AS TIMESTAMP):timestamp>
-- !query 11 output
2012-01-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 2014-03-01 00:00:00 2009-11-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00
@ -166,7 +166,7 @@ select
'2-2' year to month + tsval
from interval_arithmetic
-- !query 12 schema
struct<tsval:timestamp,CAST(tsval - interval 2 years 2 months AS TIMESTAMP):timestamp,CAST(tsval - interval -2 years -2 months AS TIMESTAMP):timestamp,CAST(tsval + interval 2 years 2 months AS TIMESTAMP):timestamp,CAST(tsval + interval -2 years -2 months AS TIMESTAMP):timestamp,CAST(tsval + (- interval 2 years 2 months) AS TIMESTAMP):timestamp,CAST(tsval + interval 2 years 2 months AS TIMESTAMP):timestamp>
struct<tsval:timestamp,CAST(tsval - 2 years 2 months AS TIMESTAMP):timestamp,CAST(tsval - -2 years -2 months AS TIMESTAMP):timestamp,CAST(tsval + 2 years 2 months AS TIMESTAMP):timestamp,CAST(tsval + -2 years -2 months AS TIMESTAMP):timestamp,CAST(tsval + (- 2 years 2 months) AS TIMESTAMP):timestamp,CAST(tsval + 2 years 2 months AS TIMESTAMP):timestamp>
-- !query 12 output
2012-01-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 2014-03-01 00:00:00 2009-11-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00
@ -177,9 +177,9 @@ select
interval '2-2' year to month - interval '3-3' year to month
from interval_arithmetic
-- !query 13 schema
struct<(interval 2 years 2 months + interval 3 years 3 months):interval,(interval 2 years 2 months - interval 3 years 3 months):interval>
struct<(2 years 2 months + 3 years 3 months):interval,(2 years 2 months - 3 years 3 months):interval>
-- !query 13 output
interval 5 years 5 months interval -1 years -1 months
5 years 5 months -1 years -1 months
-- !query 14
@ -188,9 +188,9 @@ select
'2-2' year to month - '3-3' year to month
from interval_arithmetic
-- !query 14 schema
struct<(interval 2 years 2 months + interval 3 years 3 months):interval,(interval 2 years 2 months - interval 3 years 3 months):interval>
struct<(2 years 2 months + 3 years 3 months):interval,(2 years 2 months - 3 years 3 months):interval>
-- !query 14 output
interval 5 years 5 months interval -1 years -1 months
5 years 5 months -1 years -1 months
-- !query 15
@ -204,7 +204,7 @@ select
interval '99 11:22:33.123456789' day to second + dateval
from interval_arithmetic
-- !query 15 schema
struct<dateval:date,CAST(CAST(dateval AS TIMESTAMP) - interval 99 days 11 hours 22 minutes 33.123456 seconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) - interval -99 days -11 hours -22 minutes -33.123456 seconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval 99 days 11 hours 22 minutes 33.123456 seconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval -99 days -11 hours -22 minutes -33.123456 seconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + (- interval 99 days 11 hours 22 minutes 33.123456 seconds) AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval 99 days 11 hours 22 minutes 33.123456 seconds AS DATE):date>
struct<dateval:date,CAST(CAST(dateval AS TIMESTAMP) - 99 days 11 hours 22 minutes 33.123456 seconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) - -99 days -11 hours -22 minutes -33.123456 seconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + 99 days 11 hours 22 minutes 33.123456 seconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + -99 days -11 hours -22 minutes -33.123456 seconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + (- 99 days 11 hours 22 minutes 33.123456 seconds) AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + 99 days 11 hours 22 minutes 33.123456 seconds AS DATE):date>
-- !query 15 output
2012-01-01 2011-09-23 2012-04-09 2012-04-09 2011-09-23 2011-09-23 2012-04-09
@ -220,7 +220,7 @@ select
'99 11:22:33.123456789' day to second + dateval
from interval_arithmetic
-- !query 16 schema
struct<dateval:date,CAST(CAST(dateval AS TIMESTAMP) - interval 99 days 11 hours 22 minutes 33.123456 seconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) - interval -99 days -11 hours -22 minutes -33.123456 seconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval 99 days 11 hours 22 minutes 33.123456 seconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval -99 days -11 hours -22 minutes -33.123456 seconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + (- interval 99 days 11 hours 22 minutes 33.123456 seconds) AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval 99 days 11 hours 22 minutes 33.123456 seconds AS DATE):date>
struct<dateval:date,CAST(CAST(dateval AS TIMESTAMP) - 99 days 11 hours 22 minutes 33.123456 seconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) - -99 days -11 hours -22 minutes -33.123456 seconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + 99 days 11 hours 22 minutes 33.123456 seconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + -99 days -11 hours -22 minutes -33.123456 seconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + (- 99 days 11 hours 22 minutes 33.123456 seconds) AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + 99 days 11 hours 22 minutes 33.123456 seconds AS DATE):date>
-- !query 16 output
2012-01-01 2011-09-23 2012-04-09 2012-04-09 2011-09-23 2011-09-23 2012-04-09
@ -236,7 +236,7 @@ select
interval '99 11:22:33.123456789' day to second + tsval
from interval_arithmetic
-- !query 17 schema
struct<tsval:timestamp,CAST(tsval - interval 99 days 11 hours 22 minutes 33.123456 seconds AS TIMESTAMP):timestamp,CAST(tsval - interval -99 days -11 hours -22 minutes -33.123456 seconds AS TIMESTAMP):timestamp,CAST(tsval + interval 99 days 11 hours 22 minutes 33.123456 seconds AS TIMESTAMP):timestamp,CAST(tsval + interval -99 days -11 hours -22 minutes -33.123456 seconds AS TIMESTAMP):timestamp,CAST(tsval + (- interval 99 days 11 hours 22 minutes 33.123456 seconds) AS TIMESTAMP):timestamp,CAST(tsval + interval 99 days 11 hours 22 minutes 33.123456 seconds AS TIMESTAMP):timestamp>
struct<tsval:timestamp,CAST(tsval - 99 days 11 hours 22 minutes 33.123456 seconds AS TIMESTAMP):timestamp,CAST(tsval - -99 days -11 hours -22 minutes -33.123456 seconds AS TIMESTAMP):timestamp,CAST(tsval + 99 days 11 hours 22 minutes 33.123456 seconds AS TIMESTAMP):timestamp,CAST(tsval + -99 days -11 hours -22 minutes -33.123456 seconds AS TIMESTAMP):timestamp,CAST(tsval + (- 99 days 11 hours 22 minutes 33.123456 seconds) AS TIMESTAMP):timestamp,CAST(tsval + 99 days 11 hours 22 minutes 33.123456 seconds AS TIMESTAMP):timestamp>
-- !query 17 output
2012-01-01 00:00:00 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 2012-04-09 11:22:33.123456 2011-09-23 12:37:26.876544 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456
@ -252,7 +252,7 @@ select
'99 11:22:33.123456789' day to second + tsval
from interval_arithmetic
-- !query 18 schema
struct<tsval:timestamp,CAST(tsval - interval 99 days 11 hours 22 minutes 33.123456 seconds AS TIMESTAMP):timestamp,CAST(tsval - interval -99 days -11 hours -22 minutes -33.123456 seconds AS TIMESTAMP):timestamp,CAST(tsval + interval 99 days 11 hours 22 minutes 33.123456 seconds AS TIMESTAMP):timestamp,CAST(tsval + interval -99 days -11 hours -22 minutes -33.123456 seconds AS TIMESTAMP):timestamp,CAST(tsval + (- interval 99 days 11 hours 22 minutes 33.123456 seconds) AS TIMESTAMP):timestamp,CAST(tsval + interval 99 days 11 hours 22 minutes 33.123456 seconds AS TIMESTAMP):timestamp>
struct<tsval:timestamp,CAST(tsval - 99 days 11 hours 22 minutes 33.123456 seconds AS TIMESTAMP):timestamp,CAST(tsval - -99 days -11 hours -22 minutes -33.123456 seconds AS TIMESTAMP):timestamp,CAST(tsval + 99 days 11 hours 22 minutes 33.123456 seconds AS TIMESTAMP):timestamp,CAST(tsval + -99 days -11 hours -22 minutes -33.123456 seconds AS TIMESTAMP):timestamp,CAST(tsval + (- 99 days 11 hours 22 minutes 33.123456 seconds) AS TIMESTAMP):timestamp,CAST(tsval + 99 days 11 hours 22 minutes 33.123456 seconds AS TIMESTAMP):timestamp>
-- !query 18 output
2012-01-01 00:00:00 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 2012-04-09 11:22:33.123456 2011-09-23 12:37:26.876544 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456
@ -263,9 +263,9 @@ select
interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second
from interval_arithmetic
-- !query 19 schema
struct<(interval 99 days 11 hours 22 minutes 33.123456 seconds + interval 10 days 9 hours 8 minutes 7.123456 seconds):interval,(interval 99 days 11 hours 22 minutes 33.123456 seconds - interval 10 days 9 hours 8 minutes 7.123456 seconds):interval>
struct<(99 days 11 hours 22 minutes 33.123456 seconds + 10 days 9 hours 8 minutes 7.123456 seconds):interval,(99 days 11 hours 22 minutes 33.123456 seconds - 10 days 9 hours 8 minutes 7.123456 seconds):interval>
-- !query 19 output
interval 109 days 20 hours 30 minutes 40.246912 seconds interval 89 days 2 hours 14 minutes 26 seconds
109 days 20 hours 30 minutes 40.246912 seconds 89 days 2 hours 14 minutes 26 seconds
-- !query 20
@ -274,17 +274,17 @@ select
'99 11:22:33.123456789' day to second - '10 9:8:7.123456789' day to second
from interval_arithmetic
-- !query 20 schema
struct<(interval 99 days 11 hours 22 minutes 33.123456 seconds + interval 10 days 9 hours 8 minutes 7.123456 seconds):interval,(interval 99 days 11 hours 22 minutes 33.123456 seconds - interval 10 days 9 hours 8 minutes 7.123456 seconds):interval>
struct<(99 days 11 hours 22 minutes 33.123456 seconds + 10 days 9 hours 8 minutes 7.123456 seconds):interval,(99 days 11 hours 22 minutes 33.123456 seconds - 10 days 9 hours 8 minutes 7.123456 seconds):interval>
-- !query 20 output
interval 109 days 20 hours 30 minutes 40.246912 seconds interval 89 days 2 hours 14 minutes 26 seconds
109 days 20 hours 30 minutes 40.246912 seconds 89 days 2 hours 14 minutes 26 seconds
-- !query 21
select 30 day
-- !query 21 schema
struct<interval 30 days:interval>
struct<30 days:interval>
-- !query 21 output
interval 30 days
30 days
-- !query 22
@ -318,7 +318,7 @@ select 30 day day day
-- !query 24
select date '2012-01-01' - 30 day
-- !query 24 schema
struct<CAST(CAST(DATE '2012-01-01' AS TIMESTAMP) - interval 30 days AS DATE):date>
struct<CAST(CAST(DATE '2012-01-01' AS TIMESTAMP) - 30 days AS DATE):date>
-- !query 24 output
2011-12-02
@ -354,7 +354,7 @@ select date '2012-01-01' - 30 day day day
-- !query 27
select date '2012-01-01' + '-30' day
-- !query 27 schema
struct<CAST(CAST(DATE '2012-01-01' AS TIMESTAMP) + interval -30 days AS DATE):date>
struct<CAST(CAST(DATE '2012-01-01' AS TIMESTAMP) + -30 days AS DATE):date>
-- !query 27 output
2011-12-02
@ -362,7 +362,7 @@ struct<CAST(CAST(DATE '2012-01-01' AS TIMESTAMP) + interval -30 days AS DATE):da
-- !query 28
select date '2012-01-01' + interval '-30' day
-- !query 28 schema
struct<CAST(CAST(DATE '2012-01-01' AS TIMESTAMP) + interval -30 days AS DATE):date>
struct<CAST(CAST(DATE '2012-01-01' AS TIMESTAMP) + -30 days AS DATE):date>
-- !query 28 output
2011-12-02


@ -278,12 +278,12 @@ SELECT CAST('interval 3 month 1 hour' AS interval)
-- !query 33 schema
struct<CAST(interval 3 month 1 hour AS INTERVAL):interval>
-- !query 33 output
interval 3 months 1 hours
3 months 1 hours
-- !query 34
SELECT CAST(interval 3 month 1 hour AS string)
-- !query 34 schema
struct<CAST(interval 3 months 1 hours AS STRING):string>
struct<CAST(3 months 1 hours AS STRING):string>
-- !query 34 output
interval 3 months 1 hours
3 months 1 hours
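From the DataFrame API, the two cast queries above behave like this (a hypothetical spark-shell sketch; the expected strings are the golden values above):

```scala
// Casting a string to an interval still accepts the optional "interval" prefix...
spark.sql("SELECT CAST('interval 3 month 1 hour' AS interval)").first().get(0).toString
// "3 months 1 hours"

// ...but casting an interval back to a string no longer emits that prefix.
spark.sql("SELECT CAST(interval 3 month 1 hour AS string)").first().getString(0)
// "3 months 1 hours"   (Spark 2.4 and earlier: "interval 3 months 1 hours")
```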


@ -128,7 +128,7 @@ select date '2001-10-01' - date '2001-09-28'
-- !query 14 schema
struct<subtractdates(DATE '2001-10-01', DATE '2001-09-28'):interval>
-- !query 14 output
interval 3 days
3 days
-- !query 15
@ -136,7 +136,7 @@ select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
-- !query 15 schema
struct<subtracttimestamps(CAST(DATE '2020-01-01' AS TIMESTAMP), TIMESTAMP('2019-10-06 10:11:12.345678')):interval>
-- !query 15 output
interval 2078 hours 48 minutes 47.654322 seconds
2078 hours 48 minutes 47.654322 seconds
-- !query 16
@ -144,7 +144,7 @@ select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01'
-- !query 16 schema
struct<subtracttimestamps(TIMESTAMP('2019-10-06 10:11:12.345678'), CAST(DATE '2020-01-01' AS TIMESTAMP)):interval>
-- !query 16 output
interval -2078 hours -48 minutes -47.654322 seconds
-2078 hours -48 minutes -47.654322 seconds
-- !query 17
@ -152,15 +152,15 @@ select 3 * (timestamp'2019-10-15 10:11:12.001002' - date'2019-10-15')
-- !query 17 schema
struct<multiply_interval(subtracttimestamps(TIMESTAMP('2019-10-15 10:11:12.001002'), CAST(DATE '2019-10-15' AS TIMESTAMP)), CAST(3 AS DOUBLE)):interval>
-- !query 17 output
interval 30 hours 33 minutes 36.003006 seconds
30 hours 33 minutes 36.003006 seconds
-- !query 18
select interval 4 month 2 weeks 3 microseconds * 1.5
-- !query 18 schema
struct<multiply_interval(interval 4 months 14 days 0.000003 seconds, CAST(1.5 AS DOUBLE)):interval>
struct<multiply_interval(4 months 14 days 0.000003 seconds, CAST(1.5 AS DOUBLE)):interval>
-- !query 18 output
interval 6 months 21 days 0.000005 seconds
6 months 21 days 0.000005 seconds
-- !query 19
@ -168,13 +168,13 @@ select (timestamp'2019-10-15' - timestamp'2019-10-14') / 1.5
-- !query 19 schema
struct<divide_interval(subtracttimestamps(TIMESTAMP('2019-10-15 00:00:00'), TIMESTAMP('2019-10-14 00:00:00')), CAST(1.5 AS DOUBLE)):interval>
-- !query 19 output
interval 16 hours
16 hours
-- !query 20
select interval '2 seconds' / 0
-- !query 20 schema
struct<divide_interval(interval 2 seconds, CAST(0 AS DOUBLE)):interval>
struct<divide_interval(2 seconds, CAST(0 AS DOUBLE)):interval>
-- !query 20 output
NULL
@ -182,7 +182,7 @@ NULL
-- !query 21
select interval '2 seconds' / null
-- !query 21 schema
struct<divide_interval(interval 2 seconds, CAST(NULL AS DOUBLE)):interval>
struct<divide_interval(2 seconds, CAST(NULL AS DOUBLE)):interval>
-- !query 21 output
NULL
@ -190,7 +190,7 @@ NULL
-- !query 22
select interval '2 seconds' * null
-- !query 22 schema
struct<multiply_interval(interval 2 seconds, CAST(NULL AS DOUBLE)):interval>
struct<multiply_interval(2 seconds, CAST(NULL AS DOUBLE)):interval>
-- !query 22 output
NULL
@ -198,6 +198,6 @@ NULL
-- !query 23
select null * interval '2 seconds'
-- !query 23 schema
struct<multiply_interval(interval 2 seconds, CAST(NULL AS DOUBLE)):interval>
struct<multiply_interval(2 seconds, CAST(NULL AS DOUBLE)):interval>
-- !query 23 output
NULL


@ -596,7 +596,7 @@ select sum(cast(v as interval)) from VALUES ('1 seconds'), ('2 seconds'), (null)
-- !query 58 schema
struct<sum(CAST(v AS INTERVAL)):interval>
-- !query 58 output
interval 3 seconds
3 seconds
-- !query 59
@ -604,7 +604,7 @@ select sum(cast(v as interval)) from VALUES ('-1 seconds'), ('2 seconds'), (null
-- !query 59 schema
struct<sum(CAST(v AS INTERVAL)):interval>
-- !query 59 output
interval 1 seconds
1 seconds
-- !query 60
@ -612,7 +612,7 @@ select sum(cast(v as interval)) from VALUES ('-1 seconds'), ('-2 seconds'), (nul
-- !query 60 schema
struct<sum(CAST(v AS INTERVAL)):interval>
-- !query 60 output
interval -3 seconds
-3 seconds
-- !query 61
@ -620,7 +620,7 @@ select sum(cast(v as interval)) from VALUES ('-1 weeks'), ('2 seconds'), (null)
-- !query 61 schema
struct<sum(CAST(v AS INTERVAL)):interval>
-- !query 61 output
interval -7 days 2 seconds
-7 days 2 seconds
-- !query 62
@ -632,8 +632,8 @@ group by i
-- !query 62 schema
struct<i:int,sum(CAST(v AS INTERVAL)):interval>
-- !query 62 output
1 interval -2 days
2 interval 2 seconds
1 -2 days
2 2 seconds
3 NULL
@ -645,7 +645,7 @@ having sv is not null
-- !query 63 schema
struct<sv:interval>
-- !query 63 output
interval -2 days 2 seconds
-2 days 2 seconds
-- !query 64
@ -656,7 +656,7 @@ FROM VALUES(1,'1 seconds'),(1,'2 seconds'),(2,NULL),(2,NULL) t(i,v)
-- !query 64 schema
struct<i:int,sum(CAST(v AS INTERVAL)) OVER (ORDER BY i ASC NULLS FIRST ROWS BETWEEN CURRENT ROW AND UNBOUNDED FOLLOWING):interval>
-- !query 64 output
1 interval 2 seconds
1 interval 3 seconds
1 2 seconds
1 3 seconds
2 NULL
2 NULL


@ -323,121 +323,121 @@ select timestamp '2016-33-11 20:54:00.000'
-- !query 34
select interval 13.123456789 seconds, interval -13.123456789 second
-- !query 34 schema
struct<interval 13.123456 seconds:interval,interval -13.123456 seconds:interval>
struct<13.123456 seconds:interval,-13.123456 seconds:interval>
-- !query 34 output
interval 13.123456 seconds interval -13.123456 seconds
13.123456 seconds -13.123456 seconds
-- !query 35
select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond
-- !query 35 schema
struct<interval 1 years 2 months 25 days 5 hours 6 minutes 7.008009 seconds:interval>
struct<1 years 2 months 25 days 5 hours 6 minutes 7.008009 seconds:interval>
-- !query 35 output
interval 1 years 2 months 25 days 5 hours 6 minutes 7.008009 seconds
1 years 2 months 25 days 5 hours 6 minutes 7.008009 seconds
-- !query 36
select interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second
-- !query 36 schema
struct<interval 32 years 1 months -100 days 41 hours 24 minutes 59.889987 seconds:interval>
struct<32 years 1 months -100 days 41 hours 24 minutes 59.889987 seconds:interval>
-- !query 36 output
interval 32 years 1 months -100 days 41 hours 24 minutes 59.889987 seconds
32 years 1 months -100 days 41 hours 24 minutes 59.889987 seconds
-- !query 37
select interval '0 0:0:0.1' day to second
-- !query 37 schema
struct<interval 0.1 seconds:interval>
struct<0.1 seconds:interval>
-- !query 37 output
interval 0.1 seconds
0.1 seconds
-- !query 38
select interval '10-9' year to month
-- !query 38 schema
struct<interval 10 years 9 months:interval>
struct<10 years 9 months:interval>
-- !query 38 output
interval 10 years 9 months
10 years 9 months
-- !query 39
select interval '20 15:40:32.99899999' day to hour
-- !query 39 schema
struct<interval 20 days 15 hours:interval>
struct<20 days 15 hours:interval>
-- !query 39 output
interval 20 days 15 hours
20 days 15 hours
-- !query 40
select interval '20 15:40:32.99899999' day to minute
-- !query 40 schema
struct<interval 20 days 15 hours 40 minutes:interval>
struct<20 days 15 hours 40 minutes:interval>
-- !query 40 output
interval 20 days 15 hours 40 minutes
20 days 15 hours 40 minutes
-- !query 41
select interval '20 15:40:32.99899999' day to second
-- !query 41 schema
struct<interval 20 days 15 hours 40 minutes 32.998999 seconds:interval>
struct<20 days 15 hours 40 minutes 32.998999 seconds:interval>
-- !query 41 output
interval 20 days 15 hours 40 minutes 32.998999 seconds
20 days 15 hours 40 minutes 32.998999 seconds
-- !query 42
select interval '15:40:32.99899999' hour to minute
-- !query 42 schema
struct<interval 15 hours 40 minutes:interval>
struct<15 hours 40 minutes:interval>
-- !query 42 output
interval 15 hours 40 minutes
15 hours 40 minutes
-- !query 43
select interval '15:40.99899999' hour to second
-- !query 43 schema
struct<interval 15 minutes 40.998999 seconds:interval>
struct<15 minutes 40.998999 seconds:interval>
-- !query 43 output
interval 15 minutes 40.998999 seconds
15 minutes 40.998999 seconds
-- !query 44
select interval '15:40' hour to second
-- !query 44 schema
struct<interval 15 hours 40 minutes:interval>
struct<15 hours 40 minutes:interval>
-- !query 44 output
interval 15 hours 40 minutes
15 hours 40 minutes
-- !query 45
select interval '15:40:32.99899999' hour to second
-- !query 45 schema
struct<interval 15 hours 40 minutes 32.998999 seconds:interval>
struct<15 hours 40 minutes 32.998999 seconds:interval>
-- !query 45 output
interval 15 hours 40 minutes 32.998999 seconds
15 hours 40 minutes 32.998999 seconds
-- !query 46
select interval '20 40:32.99899999' minute to second
-- !query 46 schema
struct<interval 20 days 40 minutes 32.998999 seconds:interval>
struct<20 days 40 minutes 32.998999 seconds:interval>
-- !query 46 output
interval 20 days 40 minutes 32.998999 seconds
20 days 40 minutes 32.998999 seconds
-- !query 47
select interval '40:32.99899999' minute to second
-- !query 47 schema
struct<interval 40 minutes 32.998999 seconds:interval>
struct<40 minutes 32.998999 seconds:interval>
-- !query 47 output
interval 40 minutes 32.998999 seconds
40 minutes 32.998999 seconds
-- !query 48
select interval '40:32' minute to second
-- !query 48 schema
struct<interval 40 minutes 32 seconds:interval>
struct<40 minutes 32 seconds:interval>
-- !query 48 output
interval 40 minutes 32 seconds
40 minutes 32 seconds
-- !query 49
@ -523,25 +523,25 @@ struct<3.14:decimal(3,2),-3.14:decimal(3,2),3.14E+8:decimal(3,-6),3.14E-8:decima
-- !query 56
select map(1, interval 1 day, 2, interval 3 week)
-- !query 56 schema
struct<map(1, interval 1 days, 2, interval 21 days):map<int,interval>>
struct<map(1, 1 days, 2, 21 days):map<int,interval>>
-- !query 56 output
{1:interval 1 days,2:interval 21 days}
{1:1 days,2:21 days}
-- !query 57
select interval 'interval 3 year 1 hour'
-- !query 57 schema
struct<interval 3 years 1 hours:interval>
struct<3 years 1 hours:interval>
-- !query 57 output
interval 3 years 1 hours
3 years 1 hours
-- !query 58
select interval '3 year 1 hour'
-- !query 58 schema
struct<interval 3 years 1 hours:interval>
struct<3 years 1 hours:interval>
-- !query 58 output
interval 3 years 1 hours
3 years 1 hours
-- !query 59


@ -5,190 +5,190 @@
-- !query 0
SELECT interval '999' second
-- !query 0 schema
struct<interval 16 minutes 39 seconds:interval>
struct<16 minutes 39 seconds:interval>
-- !query 0 output
interval 16 minutes 39 seconds
16 minutes 39 seconds
-- !query 1
SELECT interval '999' minute
-- !query 1 schema
struct<interval 16 hours 39 minutes:interval>
struct<16 hours 39 minutes:interval>
-- !query 1 output
interval 16 hours 39 minutes
16 hours 39 minutes
-- !query 2
SELECT interval '999' hour
-- !query 2 schema
struct<interval 999 hours:interval>
struct<999 hours:interval>
-- !query 2 output
interval 999 hours
999 hours
-- !query 3
SELECT interval '999' day
-- !query 3 schema
struct<interval 999 days:interval>
struct<999 days:interval>
-- !query 3 output
interval 999 days
999 days
-- !query 4
SELECT interval '999' month
-- !query 4 schema
struct<interval 83 years 3 months:interval>
struct<83 years 3 months:interval>
-- !query 4 output
interval 83 years 3 months
83 years 3 months
-- !query 5
SELECT interval '1' year
-- !query 5 schema
struct<interval 1 years:interval>
struct<1 years:interval>
-- !query 5 output
interval 1 years
1 years
-- !query 6
SELECT interval '2' month
-- !query 6 schema
struct<interval 2 months:interval>
struct<2 months:interval>
-- !query 6 output
interval 2 months
2 months
-- !query 7
SELECT interval '3' day
-- !query 7 schema
struct<interval 3 days:interval>
struct<3 days:interval>
-- !query 7 output
interval 3 days
3 days
-- !query 8
SELECT interval '4' hour
-- !query 8 schema
struct<interval 4 hours:interval>
struct<4 hours:interval>
-- !query 8 output
interval 4 hours
4 hours
-- !query 9
SELECT interval '5' minute
-- !query 9 schema
struct<interval 5 minutes:interval>
struct<5 minutes:interval>
-- !query 9 output
interval 5 minutes
5 minutes
-- !query 10
SELECT interval '6' second
-- !query 10 schema
struct<interval 6 seconds:interval>
struct<6 seconds:interval>
-- !query 10 output
interval 6 seconds
6 seconds
-- !query 11
SELECT interval '1-2' year to month
-- !query 11 schema
struct<interval 1 years 2 months:interval>
struct<1 years 2 months:interval>
-- !query 11 output
interval 1 years 2 months
1 years 2 months
-- !query 12
SELECT interval '1 2:03' day to hour
-- !query 12 schema
struct<interval 1 days 2 hours:interval>
struct<1 days 2 hours:interval>
-- !query 12 output
interval 1 days 2 hours
1 days 2 hours
-- !query 13
SELECT interval '1 2:03:04' day to hour
-- !query 13 schema
struct<interval 1 days 2 hours:interval>
struct<1 days 2 hours:interval>
-- !query 13 output
interval 1 days 2 hours
1 days 2 hours
-- !query 14
SELECT interval '1 2:03' day to minute
-- !query 14 schema
struct<interval 1 days 2 hours 3 minutes:interval>
struct<1 days 2 hours 3 minutes:interval>
-- !query 14 output
interval 1 days 2 hours 3 minutes
1 days 2 hours 3 minutes
-- !query 15
SELECT interval '1 2:03:04' day to minute
-- !query 15 schema
struct<interval 1 days 2 hours 3 minutes:interval>
struct<1 days 2 hours 3 minutes:interval>
-- !query 15 output
interval 1 days 2 hours 3 minutes
1 days 2 hours 3 minutes
-- !query 16
SELECT interval '1 2:03' day to second
-- !query 16 schema
struct<interval 1 days 2 hours 3 minutes:interval>
struct<1 days 2 hours 3 minutes:interval>
-- !query 16 output
interval 1 days 2 hours 3 minutes
1 days 2 hours 3 minutes
-- !query 17
SELECT interval '1 2:03:04' day to second
-- !query 17 schema
struct<interval 1 days 2 hours 3 minutes 4 seconds:interval>
struct<1 days 2 hours 3 minutes 4 seconds:interval>
-- !query 17 output
interval 1 days 2 hours 3 minutes 4 seconds
1 days 2 hours 3 minutes 4 seconds
-- !query 18
SELECT interval '1 2:03' hour to minute
-- !query 18 schema
struct<interval 1 days 2 hours 3 minutes:interval>
struct<1 days 2 hours 3 minutes:interval>
-- !query 18 output
interval 1 days 2 hours 3 minutes
1 days 2 hours 3 minutes
-- !query 19
SELECT interval '1 2:03:04' hour to minute
-- !query 19 schema
struct<interval 1 days 2 hours 3 minutes:interval>
struct<1 days 2 hours 3 minutes:interval>
-- !query 19 output
interval 1 days 2 hours 3 minutes
1 days 2 hours 3 minutes
-- !query 20
SELECT interval '1 2:03' hour to second
-- !query 20 schema
struct<interval 1 days 2 hours 3 minutes:interval>
struct<1 days 2 hours 3 minutes:interval>
-- !query 20 output
interval 1 days 2 hours 3 minutes
1 days 2 hours 3 minutes
-- !query 21
SELECT interval '1 2:03:04' hour to second
-- !query 21 schema
struct<interval 1 days 2 hours 3 minutes 4 seconds:interval>
struct<1 days 2 hours 3 minutes 4 seconds:interval>
-- !query 21 output
interval 1 days 2 hours 3 minutes 4 seconds
1 days 2 hours 3 minutes 4 seconds
-- !query 22
SELECT interval '1 2:03' minute to second
-- !query 22 schema
struct<interval 1 days 2 minutes 3 seconds:interval>
struct<1 days 2 minutes 3 seconds:interval>
-- !query 22 output
interval 1 days 2 minutes 3 seconds
1 days 2 minutes 3 seconds
-- !query 23
SELECT interval '1 2:03:04' minute to second
-- !query 23 schema
struct<interval 1 days 2 hours 3 minutes 4 seconds:interval>
struct<1 days 2 hours 3 minutes 4 seconds:interval>
-- !query 23 output
interval 1 days 2 hours 3 minutes 4 seconds
1 days 2 hours 3 minutes 4 seconds

View file

@ -16,7 +16,7 @@ select cast(1 as tinyint) + interval 2 day
struct<>
-- !query 1 output
org.apache.spark.sql.AnalysisException
cannot resolve '(CAST(1 AS TINYINT) + interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) + interval 2 days)' (tinyint and interval).; line 1 pos 7
cannot resolve '(CAST(1 AS TINYINT) + 2 days)' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) + 2 days)' (tinyint and interval).; line 1 pos 7
-- !query 2
@ -25,7 +25,7 @@ select cast(1 as smallint) + interval 2 day
struct<>
-- !query 2 output
org.apache.spark.sql.AnalysisException
cannot resolve '(CAST(1 AS SMALLINT) + interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) + interval 2 days)' (smallint and interval).; line 1 pos 7
cannot resolve '(CAST(1 AS SMALLINT) + 2 days)' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) + 2 days)' (smallint and interval).; line 1 pos 7
-- !query 3
@ -34,7 +34,7 @@ select cast(1 as int) + interval 2 day
struct<>
-- !query 3 output
org.apache.spark.sql.AnalysisException
cannot resolve '(CAST(1 AS INT) + interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS INT) + interval 2 days)' (int and interval).; line 1 pos 7
cannot resolve '(CAST(1 AS INT) + 2 days)' due to data type mismatch: differing types in '(CAST(1 AS INT) + 2 days)' (int and interval).; line 1 pos 7
-- !query 4
@ -43,7 +43,7 @@ select cast(1 as bigint) + interval 2 day
struct<>
-- !query 4 output
org.apache.spark.sql.AnalysisException
cannot resolve '(CAST(1 AS BIGINT) + interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) + interval 2 days)' (bigint and interval).; line 1 pos 7
cannot resolve '(CAST(1 AS BIGINT) + 2 days)' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) + 2 days)' (bigint and interval).; line 1 pos 7
-- !query 5
@ -52,7 +52,7 @@ select cast(1 as float) + interval 2 day
struct<>
-- !query 5 output
org.apache.spark.sql.AnalysisException
cannot resolve '(CAST(1 AS FLOAT) + interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) + interval 2 days)' (float and interval).; line 1 pos 7
cannot resolve '(CAST(1 AS FLOAT) + 2 days)' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) + 2 days)' (float and interval).; line 1 pos 7
-- !query 6
@ -61,7 +61,7 @@ select cast(1 as double) + interval 2 day
struct<>
-- !query 6 output
org.apache.spark.sql.AnalysisException
cannot resolve '(CAST(1 AS DOUBLE) + interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) + interval 2 days)' (double and interval).; line 1 pos 7
cannot resolve '(CAST(1 AS DOUBLE) + 2 days)' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) + 2 days)' (double and interval).; line 1 pos 7
-- !query 7
@ -70,13 +70,13 @@ select cast(1 as decimal(10, 0)) + interval 2 day
struct<>
-- !query 7 output
org.apache.spark.sql.AnalysisException
cannot resolve '(CAST(1 AS DECIMAL(10,0)) + interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + interval 2 days)' (decimal(10,0) and interval).; line 1 pos 7
cannot resolve '(CAST(1 AS DECIMAL(10,0)) + 2 days)' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + 2 days)' (decimal(10,0) and interval).; line 1 pos 7
-- !query 8
select cast('2017-12-11' as string) + interval 2 day
-- !query 8 schema
struct<CAST(CAST(CAST(2017-12-11 AS STRING) AS TIMESTAMP) + interval 2 days AS STRING):string>
struct<CAST(CAST(CAST(2017-12-11 AS STRING) AS TIMESTAMP) + 2 days AS STRING):string>
-- !query 8 output
2017-12-13 00:00:00
@ -84,7 +84,7 @@ struct<CAST(CAST(CAST(2017-12-11 AS STRING) AS TIMESTAMP) + interval 2 days AS S
-- !query 9
select cast('2017-12-11 09:30:00' as string) + interval 2 day
-- !query 9 schema
struct<CAST(CAST(CAST(2017-12-11 09:30:00 AS STRING) AS TIMESTAMP) + interval 2 days AS STRING):string>
struct<CAST(CAST(CAST(2017-12-11 09:30:00 AS STRING) AS TIMESTAMP) + 2 days AS STRING):string>
-- !query 9 output
2017-12-13 09:30:00
@ -95,7 +95,7 @@ select cast('1' as binary) + interval 2 day
struct<>
-- !query 10 output
org.apache.spark.sql.AnalysisException
cannot resolve '(CAST('1' AS BINARY) + interval 2 days)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + interval 2 days)' (binary and interval).; line 1 pos 7
cannot resolve '(CAST('1' AS BINARY) + 2 days)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + 2 days)' (binary and interval).; line 1 pos 7
-- !query 11
@ -104,13 +104,13 @@ select cast(1 as boolean) + interval 2 day
struct<>
-- !query 11 output
org.apache.spark.sql.AnalysisException
cannot resolve '(CAST(1 AS BOOLEAN) + interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) + interval 2 days)' (boolean and interval).; line 1 pos 7
cannot resolve '(CAST(1 AS BOOLEAN) + 2 days)' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) + 2 days)' (boolean and interval).; line 1 pos 7
-- !query 12
select cast('2017-12-11 09:30:00.0' as timestamp) + interval 2 day
-- !query 12 schema
struct<CAST(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) + interval 2 days AS TIMESTAMP):timestamp>
struct<CAST(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) + 2 days AS TIMESTAMP):timestamp>
-- !query 12 output
2017-12-13 09:30:00
@ -118,7 +118,7 @@ struct<CAST(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) + interval 2 days AS TIMEST
-- !query 13
select cast('2017-12-11 09:30:00' as date) + interval 2 day
-- !query 13 schema
struct<CAST(CAST(CAST(2017-12-11 09:30:00 AS DATE) AS TIMESTAMP) + interval 2 days AS DATE):date>
struct<CAST(CAST(CAST(2017-12-11 09:30:00 AS DATE) AS TIMESTAMP) + 2 days AS DATE):date>
-- !query 13 output
2017-12-13
@ -129,7 +129,7 @@ select interval 2 day + cast(1 as tinyint)
struct<>
-- !query 14 output
org.apache.spark.sql.AnalysisException
cannot resolve '(interval 2 days + CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(interval 2 days + CAST(1 AS TINYINT))' (interval and tinyint).; line 1 pos 7
cannot resolve '(2 days + CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(2 days + CAST(1 AS TINYINT))' (interval and tinyint).; line 1 pos 7
-- !query 15
@ -138,7 +138,7 @@ select interval 2 day + cast(1 as smallint)
struct<>
-- !query 15 output
org.apache.spark.sql.AnalysisException
cannot resolve '(interval 2 days + CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(interval 2 days + CAST(1 AS SMALLINT))' (interval and smallint).; line 1 pos 7
cannot resolve '(2 days + CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(2 days + CAST(1 AS SMALLINT))' (interval and smallint).; line 1 pos 7
-- !query 16
@ -147,7 +147,7 @@ select interval 2 day + cast(1 as int)
struct<>
-- !query 16 output
org.apache.spark.sql.AnalysisException
cannot resolve '(interval 2 days + CAST(1 AS INT))' due to data type mismatch: differing types in '(interval 2 days + CAST(1 AS INT))' (interval and int).; line 1 pos 7
cannot resolve '(2 days + CAST(1 AS INT))' due to data type mismatch: differing types in '(2 days + CAST(1 AS INT))' (interval and int).; line 1 pos 7
-- !query 17
@ -156,7 +156,7 @@ select interval 2 day + cast(1 as bigint)
struct<>
-- !query 17 output
org.apache.spark.sql.AnalysisException
cannot resolve '(interval 2 days + CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(interval 2 days + CAST(1 AS BIGINT))' (interval and bigint).; line 1 pos 7
cannot resolve '(2 days + CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(2 days + CAST(1 AS BIGINT))' (interval and bigint).; line 1 pos 7
-- !query 18
@ -165,7 +165,7 @@ select interval 2 day + cast(1 as float)
struct<>
-- !query 18 output
org.apache.spark.sql.AnalysisException
cannot resolve '(interval 2 days + CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(interval 2 days + CAST(1 AS FLOAT))' (interval and float).; line 1 pos 7
cannot resolve '(2 days + CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(2 days + CAST(1 AS FLOAT))' (interval and float).; line 1 pos 7
-- !query 19
@ -174,7 +174,7 @@ select interval 2 day + cast(1 as double)
struct<>
-- !query 19 output
org.apache.spark.sql.AnalysisException
cannot resolve '(interval 2 days + CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(interval 2 days + CAST(1 AS DOUBLE))' (interval and double).; line 1 pos 7
cannot resolve '(2 days + CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(2 days + CAST(1 AS DOUBLE))' (interval and double).; line 1 pos 7
-- !query 20
@ -183,13 +183,13 @@ select interval 2 day + cast(1 as decimal(10, 0))
struct<>
-- !query 20 output
org.apache.spark.sql.AnalysisException
cannot resolve '(interval 2 days + CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(interval 2 days + CAST(1 AS DECIMAL(10,0)))' (interval and decimal(10,0)).; line 1 pos 7
cannot resolve '(2 days + CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(2 days + CAST(1 AS DECIMAL(10,0)))' (interval and decimal(10,0)).; line 1 pos 7
-- !query 21
select interval 2 day + cast('2017-12-11' as string)
-- !query 21 schema
struct<CAST(CAST(CAST(2017-12-11 AS STRING) AS TIMESTAMP) + interval 2 days AS STRING):string>
struct<CAST(CAST(CAST(2017-12-11 AS STRING) AS TIMESTAMP) + 2 days AS STRING):string>
-- !query 21 output
2017-12-13 00:00:00
@ -197,7 +197,7 @@ struct<CAST(CAST(CAST(2017-12-11 AS STRING) AS TIMESTAMP) + interval 2 days AS S
-- !query 22
select interval 2 day + cast('2017-12-11 09:30:00' as string)
-- !query 22 schema
struct<CAST(CAST(CAST(2017-12-11 09:30:00 AS STRING) AS TIMESTAMP) + interval 2 days AS STRING):string>
struct<CAST(CAST(CAST(2017-12-11 09:30:00 AS STRING) AS TIMESTAMP) + 2 days AS STRING):string>
-- !query 22 output
2017-12-13 09:30:00
@ -208,7 +208,7 @@ select interval 2 day + cast('1' as binary)
struct<>
-- !query 23 output
org.apache.spark.sql.AnalysisException
cannot resolve '(interval 2 days + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(interval 2 days + CAST('1' AS BINARY))' (interval and binary).; line 1 pos 7
cannot resolve '(2 days + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(2 days + CAST('1' AS BINARY))' (interval and binary).; line 1 pos 7
-- !query 24
@ -217,13 +217,13 @@ select interval 2 day + cast(1 as boolean)
struct<>
-- !query 24 output
org.apache.spark.sql.AnalysisException
cannot resolve '(interval 2 days + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(interval 2 days + CAST(1 AS BOOLEAN))' (interval and boolean).; line 1 pos 7
cannot resolve '(2 days + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(2 days + CAST(1 AS BOOLEAN))' (interval and boolean).; line 1 pos 7
-- !query 25
select interval 2 day + cast('2017-12-11 09:30:00.0' as timestamp)
-- !query 25 schema
struct<CAST(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) + interval 2 days AS TIMESTAMP):timestamp>
struct<CAST(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) + 2 days AS TIMESTAMP):timestamp>
-- !query 25 output
2017-12-13 09:30:00
@ -231,7 +231,7 @@ struct<CAST(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) + interval 2 days AS TIMEST
-- !query 26
select interval 2 day + cast('2017-12-11 09:30:00' as date)
-- !query 26 schema
struct<CAST(CAST(CAST(2017-12-11 09:30:00 AS DATE) AS TIMESTAMP) + interval 2 days AS DATE):date>
struct<CAST(CAST(CAST(2017-12-11 09:30:00 AS DATE) AS TIMESTAMP) + 2 days AS DATE):date>
-- !query 26 output
2017-12-13
@ -242,7 +242,7 @@ select cast(1 as tinyint) - interval 2 day
struct<>
-- !query 27 output
org.apache.spark.sql.AnalysisException
cannot resolve '(CAST(1 AS TINYINT) - interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) - interval 2 days)' (tinyint and interval).; line 1 pos 7
cannot resolve '(CAST(1 AS TINYINT) - 2 days)' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) - 2 days)' (tinyint and interval).; line 1 pos 7
-- !query 28
@ -251,7 +251,7 @@ select cast(1 as smallint) - interval 2 day
struct<>
-- !query 28 output
org.apache.spark.sql.AnalysisException
cannot resolve '(CAST(1 AS SMALLINT) - interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) - interval 2 days)' (smallint and interval).; line 1 pos 7
cannot resolve '(CAST(1 AS SMALLINT) - 2 days)' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) - 2 days)' (smallint and interval).; line 1 pos 7
-- !query 29
@ -260,7 +260,7 @@ select cast(1 as int) - interval 2 day
struct<>
-- !query 29 output
org.apache.spark.sql.AnalysisException
cannot resolve '(CAST(1 AS INT) - interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS INT) - interval 2 days)' (int and interval).; line 1 pos 7
cannot resolve '(CAST(1 AS INT) - 2 days)' due to data type mismatch: differing types in '(CAST(1 AS INT) - 2 days)' (int and interval).; line 1 pos 7
-- !query 30
@ -269,7 +269,7 @@ select cast(1 as bigint) - interval 2 day
struct<>
-- !query 30 output
org.apache.spark.sql.AnalysisException
cannot resolve '(CAST(1 AS BIGINT) - interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) - interval 2 days)' (bigint and interval).; line 1 pos 7
cannot resolve '(CAST(1 AS BIGINT) - 2 days)' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) - 2 days)' (bigint and interval).; line 1 pos 7
-- !query 31
@ -278,7 +278,7 @@ select cast(1 as float) - interval 2 day
struct<>
-- !query 31 output
org.apache.spark.sql.AnalysisException
cannot resolve '(CAST(1 AS FLOAT) - interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) - interval 2 days)' (float and interval).; line 1 pos 7
cannot resolve '(CAST(1 AS FLOAT) - 2 days)' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) - 2 days)' (float and interval).; line 1 pos 7
-- !query 32
@ -287,7 +287,7 @@ select cast(1 as double) - interval 2 day
struct<>
-- !query 32 output
org.apache.spark.sql.AnalysisException
cannot resolve '(CAST(1 AS DOUBLE) - interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) - interval 2 days)' (double and interval).; line 1 pos 7
cannot resolve '(CAST(1 AS DOUBLE) - 2 days)' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) - 2 days)' (double and interval).; line 1 pos 7
-- !query 33
@ -296,13 +296,13 @@ select cast(1 as decimal(10, 0)) - interval 2 day
struct<>
-- !query 33 output
org.apache.spark.sql.AnalysisException
cannot resolve '(CAST(1 AS DECIMAL(10,0)) - interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) - interval 2 days)' (decimal(10,0) and interval).; line 1 pos 7
cannot resolve '(CAST(1 AS DECIMAL(10,0)) - 2 days)' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) - 2 days)' (decimal(10,0) and interval).; line 1 pos 7
-- !query 34
select cast('2017-12-11' as string) - interval 2 day
-- !query 34 schema
struct<CAST(CAST(CAST(2017-12-11 AS STRING) AS TIMESTAMP) - interval 2 days AS STRING):string>
struct<CAST(CAST(CAST(2017-12-11 AS STRING) AS TIMESTAMP) - 2 days AS STRING):string>
-- !query 34 output
2017-12-09 00:00:00
@ -310,7 +310,7 @@ struct<CAST(CAST(CAST(2017-12-11 AS STRING) AS TIMESTAMP) - interval 2 days AS S
-- !query 35
select cast('2017-12-11 09:30:00' as string) - interval 2 day
-- !query 35 schema
struct<CAST(CAST(CAST(2017-12-11 09:30:00 AS STRING) AS TIMESTAMP) - interval 2 days AS STRING):string>
struct<CAST(CAST(CAST(2017-12-11 09:30:00 AS STRING) AS TIMESTAMP) - 2 days AS STRING):string>
-- !query 35 output
2017-12-09 09:30:00
@ -321,7 +321,7 @@ select cast('1' as binary) - interval 2 day
struct<>
-- !query 36 output
org.apache.spark.sql.AnalysisException
cannot resolve '(CAST('1' AS BINARY) - interval 2 days)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - interval 2 days)' (binary and interval).; line 1 pos 7
cannot resolve '(CAST('1' AS BINARY) - 2 days)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - 2 days)' (binary and interval).; line 1 pos 7
-- !query 37
@ -330,13 +330,13 @@ select cast(1 as boolean) - interval 2 day
struct<>
-- !query 37 output
org.apache.spark.sql.AnalysisException
cannot resolve '(CAST(1 AS BOOLEAN) - interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) - interval 2 days)' (boolean and interval).; line 1 pos 7
cannot resolve '(CAST(1 AS BOOLEAN) - 2 days)' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) - 2 days)' (boolean and interval).; line 1 pos 7
-- !query 38
select cast('2017-12-11 09:30:00.0' as timestamp) - interval 2 day
-- !query 38 schema
struct<CAST(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) - interval 2 days AS TIMESTAMP):timestamp>
struct<CAST(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) - 2 days AS TIMESTAMP):timestamp>
-- !query 38 output
2017-12-09 09:30:00
@ -344,6 +344,6 @@ struct<CAST(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) - interval 2 days AS TIMEST
-- !query 39
select cast('2017-12-11 09:30:00' as date) - interval 2 day
-- !query 39 schema
struct<CAST(CAST(CAST(2017-12-11 09:30:00 AS DATE) AS TIMESTAMP) - interval 2 days AS DATE):date>
struct<CAST(CAST(CAST(2017-12-11 09:30:00 AS DATE) AS TIMESTAMP) - 2 days AS DATE):date>
-- !query 39 output
2017-12-09


@ -154,7 +154,7 @@ SELECT val_timestamp, udf(cate), avg(val_timestamp) OVER(PARTITION BY udf(cate)
RANGE BETWEEN CURRENT ROW AND interval 23 days 4 hours FOLLOWING) FROM testData
ORDER BY udf(cate), val_timestamp
-- !query 9 schema
struct<val_timestamp:timestamp,CAST(udf(cast(cate as string)) AS STRING):string,avg(CAST(val_timestamp AS DOUBLE)) OVER (PARTITION BY CAST(udf(cast(cate as string)) AS STRING) ORDER BY val_timestamp ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND interval 23 days 4 hours FOLLOWING):double>
struct<val_timestamp:timestamp,CAST(udf(cast(cate as string)) AS STRING):string,avg(CAST(val_timestamp AS DOUBLE)) OVER (PARTITION BY CAST(udf(cast(cate as string)) AS STRING) ORDER BY val_timestamp ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 23 days 4 hours FOLLOWING):double>
-- !query 9 output
NULL NULL NULL
2017-07-31 17:00:00 NULL 1.5015456E9


@ -154,7 +154,7 @@ SELECT val_timestamp, cate, avg(val_timestamp) OVER(PARTITION BY cate ORDER BY v
RANGE BETWEEN CURRENT ROW AND interval 23 days 4 hours FOLLOWING) FROM testData
ORDER BY cate, val_timestamp
-- !query 9 schema
struct<val_timestamp:timestamp,cate:string,avg(CAST(val_timestamp AS DOUBLE)) OVER (PARTITION BY cate ORDER BY val_timestamp ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND interval 23 days 4 hours FOLLOWING):double>
struct<val_timestamp:timestamp,cate:string,avg(CAST(val_timestamp AS DOUBLE)) OVER (PARTITION BY cate ORDER BY val_timestamp ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 23 days 4 hours FOLLOWING):double>
-- !query 9 output
NULL NULL NULL
2017-07-31 17:00:00 NULL 1.5015456E9


@ -293,10 +293,10 @@ class DateFunctionsSuite extends QueryTest with SharedSparkSession {
val i = new CalendarInterval(2, 2, 2000000L)
val df = Seq((1, t1, d1), (3, t2, d2)).toDF("n", "t", "d")
checkAnswer(
df.selectExpr(s"d + $i"),
df.selectExpr(s"d + INTERVAL'$i'"),
Seq(Row(Date.valueOf("2015-10-02")), Row(Date.valueOf("2016-03-02"))))
checkAnswer(
df.selectExpr(s"t + $i"),
df.selectExpr(s"t + INTERVAL'$i'"),
Seq(Row(Timestamp.valueOf("2015-10-03 00:00:01")),
Row(Timestamp.valueOf("2016-03-02 00:00:02"))))
}
@ -309,10 +309,10 @@ class DateFunctionsSuite extends QueryTest with SharedSparkSession {
val i = new CalendarInterval(2, 2, 2000000L)
val df = Seq((1, t1, d1), (3, t2, d2)).toDF("n", "t", "d")
checkAnswer(
df.selectExpr(s"d - $i"),
df.selectExpr(s"d - INTERVAL'$i'"),
Seq(Row(Date.valueOf("2015-07-27")), Row(Date.valueOf("2015-12-26"))))
checkAnswer(
df.selectExpr(s"t - $i"),
df.selectExpr(s"t - INTERVAL'$i'"),
Seq(Row(Timestamp.valueOf("2015-07-29 23:59:59")),
Row(Timestamp.valueOf("2015-12-27 00:00:00"))))
}
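A note on why the two test changes above are needed (illustrative sketch, not part of the patch): the interval's string form no longer starts with the word "interval", so interpolating it directly into SQL text no longer forms an interval literal, and the tests must wrap it in an explicit `INTERVAL'...'` literal.

```scala
import org.apache.spark.unsafe.types.CalendarInterval

val i = new CalendarInterval(2, 2, 2000000L)
i.toString            // "2 months 2 days 2 seconds" (no "interval" prefix anymore)

s"d + $i"             // "d + 2 months 2 days 2 seconds"            -- not an interval literal
s"d + INTERVAL'$i'"   // "d + INTERVAL'2 months 2 days 2 seconds'"  -- explicit interval literal
```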


@ -218,15 +218,15 @@ class JsonFunctionsSuite extends QueryTest with SharedSparkSession {
test("to_json - key types of map don't matter") {
// interval type is invalid for converting to JSON. However, the keys of a map are treated
// as strings, so its type doesn't matter.
val df = Seq(Tuple1(Tuple1("interval -3 month 7 hours"))).toDF("a")
val df = Seq(Tuple1(Tuple1("-3 month 7 hours"))).toDF("a")
.select(struct(map($"a._1".cast(CalendarIntervalType), lit("a")).as("col1")).as("c"))
checkAnswer(
df.select(to_json($"c")),
Row("""{"col1":{"interval -3 months 7 hours":"a"}}""") :: Nil)
Row("""{"col1":{"-3 months 7 hours":"a"}}""") :: Nil)
}
test("to_json unsupported type") {
val baseDf = Seq(Tuple1(Tuple1("interval -3 month 7 hours"))).toDF("a")
val baseDf = Seq(Tuple1(Tuple1("-3 month 7 hours"))).toDF("a")
val df = baseDf.select(struct($"a._1".cast(CalendarIntervalType).as("a")).as("c"))
val e = intercept[AnalysisException]{
// Unsupported type throws an exception


@ -675,7 +675,7 @@ class HiveThriftBinaryServerSuite extends HiveThriftJdbcTest {
withJdbcStatement() { statement =>
val rs = statement.executeQuery("SELECT interval 3 months 1 hours")
assert(rs.next())
assert(rs.getString(1) === "interval 3 months 1 hours")
assert(rs.getString(1) === "3 months 1 hours")
}
// Invalid interval value
withJdbcStatement() { statement =>


@ -264,7 +264,7 @@ class SparkThriftServerProtocolVersionsSuite extends HiveThriftJdbcTest {
test(s"$version get interval type") {
testExecuteStatementWithProtocolVersion(version, "SELECT interval '1' year '2' day") { rs =>
assert(rs.next())
assert(rs.getString(1) === "interval 1 years 2 days")
assert(rs.getString(1) === "1 years 2 days")
}
}