From 9b61f90987d7ccf8bd74ec5078e4babff3c2a2b8 Mon Sep 17 00:00:00 2001
From: Wenchen Fan
Date: Thu, 7 Nov 2019 15:44:50 +0800
Subject: [PATCH] [SPARK-29761][SQL] do not output leading 'interval' in CalendarInterval.toString

### What changes were proposed in this pull request?

Remove the leading "interval" prefix from `CalendarInterval.toString`.

### Why are the changes needed?

Although the "interval" prefix is accepted when casting a string to an interval, it is not recommended. Dropping it from the output is also consistent with PostgreSQL:
```
cloud0fan=# select interval '1' day;
 interval
----------
 1 day
(1 row)
```

### Does this PR introduce any user-facing change?

Yes. When displaying a DataFrame with an interval type column, the result is rendered differently.

### How was this patch tested?

Updated tests.

Closes #26401 from cloud-fan/interval.

Authored-by: Wenchen Fan
Signed-off-by: Wenchen Fan
---
 .../spark/unsafe/types/CalendarInterval.java | 27 +++---
 .../unsafe/types/CalendarIntervalSuite.java | 16 ++--
 docs/sql-migration-guide.md | 2 +
 .../catalyst/expressions/aggregate/Sum.scala | 2 +-
 .../sql/catalyst/expressions/CastSuite.scala | 2 +-
 .../ExpressionSQLBuilderSuite.scala | 4 +-
 .../sql-tests/results/ansi/interval.sql.out | 70 +++++++-------
 .../resources/sql-tests/results/cast.sql.out | 6 +-
 .../sql-tests/results/datetime.sql.out | 22 ++---
 .../sql-tests/results/group-by.sql.out | 18 ++--
 .../sql-tests/results/literals.sql.out | 72 +++++++-------
 .../results/postgreSQL/interval.sql.out | 96 +++++++++----------
 .../native/dateTimeOperations.sql.out | 78 +++++++--------
 .../sql-tests/results/udf/udf-window.sql.out | 2 +-
 .../sql-tests/results/window.sql.out | 2 +-
 .../apache/spark/sql/DateFunctionsSuite.scala | 8 +-
 .../apache/spark/sql/JsonFunctionsSuite.scala | 6 +-
 .../HiveThriftServer2Suites.scala | 2 +-
 ...arkThriftServerProtocolVersionsSuite.scala | 2 +-
 19 files changed, 221 insertions(+), 216 deletions(-)

diff --git a/common/unsafe/src/main/java/org/apache/spark/unsafe/types/CalendarInterval.java b/common/unsafe/src/main/java/org/apache/spark/unsafe/types/CalendarInterval.java
index 5e8b334550..314fb98ace 100644
--- a/common/unsafe/src/main/java/org/apache/spark/unsafe/types/CalendarInterval.java
+++ b/common/unsafe/src/main/java/org/apache/spark/unsafe/types/CalendarInterval.java
@@ -84,35 +84,38 @@ public final class CalendarInterval implements Serializable {
 
   @Override
   public String toString() {
-    StringBuilder sb = new StringBuilder("interval");
-
-    if (months != 0) {
-      appendUnit(sb, months / 12, "year");
-      appendUnit(sb, months % 12, "month");
+    if (months == 0 && days == 0 && microseconds == 0) {
+      return "0 seconds";
     }
 
-    appendUnit(sb, days, "day");
+    StringBuilder sb = new StringBuilder();
+
+    if (months != 0) {
+      appendUnit(sb, months / 12, "years");
+      appendUnit(sb, months % 12, "months");
+    }
+
+    appendUnit(sb, days, "days");
 
     if (microseconds != 0) {
       long rest = microseconds;
-      appendUnit(sb, rest / MICROS_PER_HOUR, "hour");
+      appendUnit(sb, rest / MICROS_PER_HOUR, "hours");
       rest %= MICROS_PER_HOUR;
-      appendUnit(sb, rest / MICROS_PER_MINUTE, "minute");
+      appendUnit(sb, rest / MICROS_PER_MINUTE, "minutes");
       rest %= MICROS_PER_MINUTE;
       if (rest != 0) {
         String s = BigDecimal.valueOf(rest, 6).stripTrailingZeros().toPlainString();
-        sb.append(' ').append(s).append(" seconds");
+        sb.append(s).append(" seconds ");
       }
-    } else if (months == 0 && days == 0) {
-      sb.append(" 0 microseconds");
     }
 
+    sb.setLength(sb.length() - 1);
     return sb.toString();
   }
 
   private void appendUnit(StringBuilder sb, long value, String unit) {
     if (value != 
0) { - sb.append(' ').append(value).append(' ').append(unit).append('s'); + sb.append(value).append(' ').append(unit).append(' '); } } diff --git a/common/unsafe/src/test/java/org/apache/spark/unsafe/types/CalendarIntervalSuite.java b/common/unsafe/src/test/java/org/apache/spark/unsafe/types/CalendarIntervalSuite.java index 7f607e65ea..1f8dcb0e84 100644 --- a/common/unsafe/src/test/java/org/apache/spark/unsafe/types/CalendarIntervalSuite.java +++ b/common/unsafe/src/test/java/org/apache/spark/unsafe/types/CalendarIntervalSuite.java @@ -51,28 +51,28 @@ public class CalendarIntervalSuite { CalendarInterval i; i = new CalendarInterval(0, 0, 0); - assertEquals("interval 0 microseconds", i.toString()); + assertEquals("0 seconds", i.toString()); i = new CalendarInterval(34, 0, 0); - assertEquals("interval 2 years 10 months", i.toString()); + assertEquals("2 years 10 months", i.toString()); i = new CalendarInterval(-34, 0, 0); - assertEquals("interval -2 years -10 months", i.toString()); + assertEquals("-2 years -10 months", i.toString()); i = new CalendarInterval(0, 31, 0); - assertEquals("interval 31 days", i.toString()); + assertEquals("31 days", i.toString()); i = new CalendarInterval(0, -31, 0); - assertEquals("interval -31 days", i.toString()); + assertEquals("-31 days", i.toString()); i = new CalendarInterval(0, 0, 3 * MICROS_PER_HOUR + 13 * MICROS_PER_MINUTE + 123); - assertEquals("interval 3 hours 13 minutes 0.000123 seconds", i.toString()); + assertEquals("3 hours 13 minutes 0.000123 seconds", i.toString()); i = new CalendarInterval(0, 0, -3 * MICROS_PER_HOUR - 13 * MICROS_PER_MINUTE - 123); - assertEquals("interval -3 hours -13 minutes -0.000123 seconds", i.toString()); + assertEquals("-3 hours -13 minutes -0.000123 seconds", i.toString()); i = new CalendarInterval(34, 31, 3 * MICROS_PER_HOUR + 13 * MICROS_PER_MINUTE + 123); - assertEquals("interval 2 years 10 months 31 days 3 hours 13 minutes 0.000123 seconds", + assertEquals("2 years 10 months 31 days 3 hours 13 minutes 0.000123 seconds", i.toString()); } diff --git a/docs/sql-migration-guide.md b/docs/sql-migration-guide.md index 3db3eca986..153e68b58e 100644 --- a/docs/sql-migration-guide.md +++ b/docs/sql-migration-guide.md @@ -222,6 +222,8 @@ license: | - Since Spark 3.0, the interval literal syntax does not allow multiple from-to units anymore. For example, `SELECT INTERVAL '1-1' YEAR TO MONTH '2-2' YEAR TO MONTH'` throws parser exception. + - Since Spark 3.0, when casting interval values to string type, there is no "interval" prefix, e.g. `1 days 2 hours`. In Spark version 2.4 and earlier, the string contains the "interval" prefix like `interval 1 days 2 hours`. 
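To illustrate the user-facing change described in the migration note above, here is a minimal, self-contained sketch. It is not part of this patch; the object name, app name, and column alias are made up for the example, and it assumes a local Spark 3.0 build that includes this change:

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical demo, not part of this patch: casts an interval value to a
// string and prints it, showing the absence of the old "interval" prefix.
object IntervalRenderingDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("interval-rendering-demo")
      .getOrCreate()
    // With this change the value below is rendered as "1 days 2 hours";
    // Spark 2.4 rendered the same expression as "interval 1 days 2 hours".
    spark.sql("SELECT CAST(INTERVAL 1 DAY 2 HOURS AS STRING) AS rendered").show(truncate = false)
    spark.stop()
  }
}
```

The same rendering applies anywhere an interval is converted to a string (a `CAST` in SQL, `df.show()` on an interval column, or a JDBC `getString`), which is why the golden test files later in this patch change so uniformly.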
+ ## Upgrading from Spark SQL 2.4 to 2.4.1 - The value of `spark.executor.heartbeatInterval`, when specified without units like "30" rather than "30s", was diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Sum.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Sum.scala index 843c361233..87f1a4f02e 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Sum.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Sum.scala @@ -34,7 +34,7 @@ import org.apache.spark.sql.types._ > SELECT _FUNC_(col) FROM VALUES (NULL), (NULL) AS tab(col); NULL > SELECT _FUNC_(cast(col as interval)) FROM VALUES ('1 seconds'), ('2 seconds'), (null) tab(col); - interval 3 seconds + 3 seconds """, since = "1.0.0") // scalastyle:on line.size.limit diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala index 42540036d8..b6ca460c78 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala @@ -669,7 +669,7 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper { checkEvaluation(Cast(Literal.create( new CalendarInterval(15, 9, -3 * CalendarInterval.MICROS_PER_HOUR), CalendarIntervalType), StringType), - "interval 1 years 3 months 9 days -3 hours") + "1 years 3 months 9 days -3 hours") checkEvaluation(Cast(Literal("INTERVAL 1 Second 1 microsecond"), CalendarIntervalType), new CalendarInterval(0, 0, 1000001)) checkEvaluation(Cast(Literal("1 MONTH 1 Microsecond"), CalendarIntervalType), diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionSQLBuilderSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionSQLBuilderSuite.scala index 28fd4f6310..3447ac6ba4 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionSQLBuilderSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionSQLBuilderSuite.scala @@ -169,12 +169,12 @@ class ExpressionSQLBuilderSuite extends SparkFunSuite { checkSQL( TimeAdd('a, interval), - "`a` + interval 1 hours" + "`a` + 1 hours" ) checkSQL( TimeSub('a, interval), - "`a` - interval 1 hours" + "`a` - 1 hours" ) } } diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out index d0087bbcce..c6392617e2 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out @@ -25,9 +25,9 @@ select '1' year, 2 years -- !query 1 schema -struct +struct<1 seconds:interval,2 seconds:interval,1 minutes:interval,2 minutes:interval,1 hours:interval,2 hours:interval,1 days:interval,2 days:interval,1 months:interval,2 months:interval,1 years:interval,2 years:interval> -- !query 1 output -interval 1 seconds interval 2 seconds interval 1 minutes interval 2 minutes interval 1 hours interval 2 hours interval 1 days interval 2 days interval 1 months interval 2 months interval 1 years interval 2 years +1 seconds 2 seconds 1 minutes 2 minutes 1 hours 2 hours 1 days 2 days 1 months 2 months 1 years 2 years -- !query 2 @@ -36,9 +36,9 @@ select interval '10' year, 
interval '11' month -- !query 2 schema -struct +struct<10 years 11 months:interval,10 years:interval,11 months:interval> -- !query 2 output -interval 10 years 11 months interval 10 years interval 11 months +10 years 11 months 10 years 11 months -- !query 3 @@ -47,9 +47,9 @@ select '10' year, '11' month -- !query 3 schema -struct +struct<10 years 11 months:interval,10 years:interval,11 months:interval> -- !query 3 output -interval 10 years 11 months interval 10 years interval 11 months +10 years 11 months 10 years 11 months -- !query 4 @@ -61,9 +61,9 @@ select interval '13' second, interval '13.123456789' second -- !query 4 schema -struct +struct<10 days 9 hours 8 minutes 7.987654 seconds:interval,10 days:interval,11 hours:interval,12 minutes:interval,13 seconds:interval,13.123456 seconds:interval> -- !query 4 output -interval 10 days 9 hours 8 minutes 7.987654 seconds interval 10 days interval 11 hours interval 12 minutes interval 13 seconds interval 13.123456 seconds +10 days 9 hours 8 minutes 7.987654 seconds 10 days 11 hours 12 minutes 13 seconds 13.123456 seconds -- !query 5 @@ -75,25 +75,25 @@ select '13' second, '13.123456789' second -- !query 5 schema -struct +struct<10 days 9 hours 8 minutes 7.987654 seconds:interval,10 days:interval,11 hours:interval,12 minutes:interval,13 seconds:interval,13.123456 seconds:interval> -- !query 5 output -interval 10 days 9 hours 8 minutes 7.987654 seconds interval 10 days interval 11 hours interval 12 minutes interval 13 seconds interval 13.123456 seconds +10 days 9 hours 8 minutes 7.987654 seconds 10 days 11 hours 12 minutes 13 seconds 13.123456 seconds -- !query 6 select map(1, interval 1 day, 2, interval 3 week) -- !query 6 schema -struct> +struct> -- !query 6 output -{1:interval 1 days,2:interval 21 days} +{1:1 days,2:21 days} -- !query 7 select map(1, 1 day, 2, 3 week) -- !query 7 schema -struct> +struct> -- !query 7 output -{1:interval 1 days,2:interval 21 days} +{1:1 days,2:21 days} -- !query 8 @@ -118,7 +118,7 @@ select interval '2-2' year to month + dateval from interval_arithmetic -- !query 9 schema -struct +struct -- !query 9 output 2012-01-01 2009-11-01 2014-03-01 2014-03-01 2009-11-01 2009-11-01 2014-03-01 @@ -134,7 +134,7 @@ select '2-2' year to month + dateval from interval_arithmetic -- !query 10 schema -struct +struct -- !query 10 output 2012-01-01 2009-11-01 2014-03-01 2014-03-01 2009-11-01 2009-11-01 2014-03-01 @@ -150,7 +150,7 @@ select interval '2-2' year to month + tsval from interval_arithmetic -- !query 11 schema -struct +struct -- !query 11 output 2012-01-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 2014-03-01 00:00:00 2009-11-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 @@ -166,7 +166,7 @@ select '2-2' year to month + tsval from interval_arithmetic -- !query 12 schema -struct +struct -- !query 12 output 2012-01-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 2014-03-01 00:00:00 2009-11-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 @@ -177,9 +177,9 @@ select interval '2-2' year to month - interval '3-3' year to month from interval_arithmetic -- !query 13 schema -struct<(interval 2 years 2 months + interval 3 years 3 months):interval,(interval 2 years 2 months - interval 3 years 3 months):interval> +struct<(2 years 2 months + 3 years 3 months):interval,(2 years 2 months - 3 years 3 months):interval> -- !query 13 output -interval 5 years 5 months interval -1 years -1 months +5 years 5 months -1 years -1 months -- !query 14 @@ -188,9 +188,9 @@ select '2-2' year to month - '3-3' year to month from 
interval_arithmetic -- !query 14 schema -struct<(interval 2 years 2 months + interval 3 years 3 months):interval,(interval 2 years 2 months - interval 3 years 3 months):interval> +struct<(2 years 2 months + 3 years 3 months):interval,(2 years 2 months - 3 years 3 months):interval> -- !query 14 output -interval 5 years 5 months interval -1 years -1 months +5 years 5 months -1 years -1 months -- !query 15 @@ -204,7 +204,7 @@ select interval '99 11:22:33.123456789' day to second + dateval from interval_arithmetic -- !query 15 schema -struct +struct -- !query 15 output 2012-01-01 2011-09-23 2012-04-09 2012-04-09 2011-09-23 2011-09-23 2012-04-09 @@ -220,7 +220,7 @@ select '99 11:22:33.123456789' day to second + dateval from interval_arithmetic -- !query 16 schema -struct +struct -- !query 16 output 2012-01-01 2011-09-23 2012-04-09 2012-04-09 2011-09-23 2011-09-23 2012-04-09 @@ -236,7 +236,7 @@ select interval '99 11:22:33.123456789' day to second + tsval from interval_arithmetic -- !query 17 schema -struct +struct -- !query 17 output 2012-01-01 00:00:00 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 2012-04-09 11:22:33.123456 2011-09-23 12:37:26.876544 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 @@ -252,7 +252,7 @@ select '99 11:22:33.123456789' day to second + tsval from interval_arithmetic -- !query 18 schema -struct +struct -- !query 18 output 2012-01-01 00:00:00 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 2012-04-09 11:22:33.123456 2011-09-23 12:37:26.876544 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 @@ -263,9 +263,9 @@ select interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second from interval_arithmetic -- !query 19 schema -struct<(interval 99 days 11 hours 22 minutes 33.123456 seconds + interval 10 days 9 hours 8 minutes 7.123456 seconds):interval,(interval 99 days 11 hours 22 minutes 33.123456 seconds - interval 10 days 9 hours 8 minutes 7.123456 seconds):interval> +struct<(99 days 11 hours 22 minutes 33.123456 seconds + 10 days 9 hours 8 minutes 7.123456 seconds):interval,(99 days 11 hours 22 minutes 33.123456 seconds - 10 days 9 hours 8 minutes 7.123456 seconds):interval> -- !query 19 output -interval 109 days 20 hours 30 minutes 40.246912 seconds interval 89 days 2 hours 14 minutes 26 seconds +109 days 20 hours 30 minutes 40.246912 seconds 89 days 2 hours 14 minutes 26 seconds -- !query 20 @@ -274,17 +274,17 @@ select '99 11:22:33.123456789' day to second - '10 9:8:7.123456789' day to second from interval_arithmetic -- !query 20 schema -struct<(interval 99 days 11 hours 22 minutes 33.123456 seconds + interval 10 days 9 hours 8 minutes 7.123456 seconds):interval,(interval 99 days 11 hours 22 minutes 33.123456 seconds - interval 10 days 9 hours 8 minutes 7.123456 seconds):interval> +struct<(99 days 11 hours 22 minutes 33.123456 seconds + 10 days 9 hours 8 minutes 7.123456 seconds):interval,(99 days 11 hours 22 minutes 33.123456 seconds - 10 days 9 hours 8 minutes 7.123456 seconds):interval> -- !query 20 output -interval 109 days 20 hours 30 minutes 40.246912 seconds interval 89 days 2 hours 14 minutes 26 seconds +109 days 20 hours 30 minutes 40.246912 seconds 89 days 2 hours 14 minutes 26 seconds -- !query 21 select 30 day -- !query 21 schema -struct +struct<30 days:interval> -- !query 21 output -interval 30 days +30 days -- !query 22 @@ -318,7 +318,7 @@ select 30 day day day -- !query 24 select date '2012-01-01' - 30 day -- !query 24 schema -struct +struct -- !query 24 output 2011-12-02 @@ -354,7 +354,7 @@ 
select date '2012-01-01' - 30 day day day -- !query 27 select date '2012-01-01' + '-30' day -- !query 27 schema -struct +struct -- !query 27 output 2011-12-02 @@ -362,7 +362,7 @@ struct +struct -- !query 28 output 2011-12-02 diff --git a/sql/core/src/test/resources/sql-tests/results/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/cast.sql.out index adad21f049..4884f9d491 100644 --- a/sql/core/src/test/resources/sql-tests/results/cast.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/cast.sql.out @@ -278,12 +278,12 @@ SELECT CAST('interval 3 month 1 hour' AS interval) -- !query 33 schema struct -- !query 33 output -interval 3 months 1 hours +3 months 1 hours -- !query 34 SELECT CAST(interval 3 month 1 hour AS string) -- !query 34 schema -struct +struct -- !query 34 output -interval 3 months 1 hours +3 months 1 hours diff --git a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out index d003854570..b5ea7d66fd 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out @@ -128,7 +128,7 @@ select date '2001-10-01' - date '2001-09-28' -- !query 14 schema struct -- !query 14 output -interval 3 days +3 days -- !query 15 @@ -136,7 +136,7 @@ select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678' -- !query 15 schema struct -- !query 15 output -interval 2078 hours 48 minutes 47.654322 seconds +2078 hours 48 minutes 47.654322 seconds -- !query 16 @@ -144,7 +144,7 @@ select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01' -- !query 16 schema struct -- !query 16 output -interval -2078 hours -48 minutes -47.654322 seconds +-2078 hours -48 minutes -47.654322 seconds -- !query 17 @@ -152,15 +152,15 @@ select 3 * (timestamp'2019-10-15 10:11:12.001002' - date'2019-10-15') -- !query 17 schema struct -- !query 17 output -interval 30 hours 33 minutes 36.003006 seconds +30 hours 33 minutes 36.003006 seconds -- !query 18 select interval 4 month 2 weeks 3 microseconds * 1.5 -- !query 18 schema -struct +struct -- !query 18 output -interval 6 months 21 days 0.000005 seconds +6 months 21 days 0.000005 seconds -- !query 19 @@ -168,13 +168,13 @@ select (timestamp'2019-10-15' - timestamp'2019-10-14') / 1.5 -- !query 19 schema struct -- !query 19 output -interval 16 hours +16 hours -- !query 20 select interval '2 seconds' / 0 -- !query 20 schema -struct +struct -- !query 20 output NULL @@ -182,7 +182,7 @@ NULL -- !query 21 select interval '2 seconds' / null -- !query 21 schema -struct +struct -- !query 21 output NULL @@ -190,7 +190,7 @@ NULL -- !query 22 select interval '2 seconds' * null -- !query 22 schema -struct +struct -- !query 22 output NULL @@ -198,6 +198,6 @@ NULL -- !query 23 select null * interval '2 seconds' -- !query 23 schema -struct +struct -- !query 23 output NULL diff --git a/sql/core/src/test/resources/sql-tests/results/group-by.sql.out b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out index eed6e02798..0417bfb070 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-by.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out @@ -596,7 +596,7 @@ select sum(cast(v as interval)) from VALUES ('1 seconds'), ('2 seconds'), (null) -- !query 58 schema struct -- !query 58 output -interval 3 seconds +3 seconds -- !query 59 @@ -604,7 +604,7 @@ select sum(cast(v as interval)) from VALUES ('-1 seconds'), ('2 seconds'), (null -- !query 59 schema struct -- !query 59 output 
-interval 1 seconds +1 seconds -- !query 60 @@ -612,7 +612,7 @@ select sum(cast(v as interval)) from VALUES ('-1 seconds'), ('-2 seconds'), (nul -- !query 60 schema struct -- !query 60 output -interval -3 seconds +-3 seconds -- !query 61 @@ -620,7 +620,7 @@ select sum(cast(v as interval)) from VALUES ('-1 weeks'), ('2 seconds'), (null) -- !query 61 schema struct -- !query 61 output -interval -7 days 2 seconds +-7 days 2 seconds -- !query 62 @@ -632,8 +632,8 @@ group by i -- !query 62 schema struct -- !query 62 output -1 interval -2 days -2 interval 2 seconds +1 -2 days +2 2 seconds 3 NULL @@ -645,7 +645,7 @@ having sv is not null -- !query 63 schema struct -- !query 63 output -interval -2 days 2 seconds +-2 days 2 seconds -- !query 64 @@ -656,7 +656,7 @@ FROM VALUES(1,'1 seconds'),(1,'2 seconds'),(2,NULL),(2,NULL) t(i,v) -- !query 64 schema struct -- !query 64 output -1 interval 2 seconds -1 interval 3 seconds +1 2 seconds +1 3 seconds 2 NULL 2 NULL diff --git a/sql/core/src/test/resources/sql-tests/results/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/literals.sql.out index dc888be5d5..e9aa046717 100644 --- a/sql/core/src/test/resources/sql-tests/results/literals.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/literals.sql.out @@ -323,121 +323,121 @@ select timestamp '2016-33-11 20:54:00.000' -- !query 34 select interval 13.123456789 seconds, interval -13.123456789 second -- !query 34 schema -struct +struct<13.123456 seconds:interval,-13.123456 seconds:interval> -- !query 34 output -interval 13.123456 seconds interval -13.123456 seconds +13.123456 seconds -13.123456 seconds -- !query 35 select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond -- !query 35 schema -struct +struct<1 years 2 months 25 days 5 hours 6 minutes 7.008009 seconds:interval> -- !query 35 output -interval 1 years 2 months 25 days 5 hours 6 minutes 7.008009 seconds +1 years 2 months 25 days 5 hours 6 minutes 7.008009 seconds -- !query 36 select interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second -- !query 36 schema -struct +struct<32 years 1 months -100 days 41 hours 24 minutes 59.889987 seconds:interval> -- !query 36 output -interval 32 years 1 months -100 days 41 hours 24 minutes 59.889987 seconds +32 years 1 months -100 days 41 hours 24 minutes 59.889987 seconds -- !query 37 select interval '0 0:0:0.1' day to second -- !query 37 schema -struct +struct<0.1 seconds:interval> -- !query 37 output -interval 0.1 seconds +0.1 seconds -- !query 38 select interval '10-9' year to month -- !query 38 schema -struct +struct<10 years 9 months:interval> -- !query 38 output -interval 10 years 9 months +10 years 9 months -- !query 39 select interval '20 15:40:32.99899999' day to hour -- !query 39 schema -struct +struct<20 days 15 hours:interval> -- !query 39 output -interval 20 days 15 hours +20 days 15 hours -- !query 40 select interval '20 15:40:32.99899999' day to minute -- !query 40 schema -struct +struct<20 days 15 hours 40 minutes:interval> -- !query 40 output -interval 20 days 15 hours 40 minutes +20 days 15 hours 40 minutes -- !query 41 select interval '20 15:40:32.99899999' day to second -- !query 41 schema -struct +struct<20 days 15 hours 40 minutes 32.998999 seconds:interval> -- !query 41 output -interval 20 days 15 hours 40 minutes 32.998999 seconds +20 days 15 hours 40 minutes 32.998999 seconds -- !query 42 select interval '15:40:32.99899999' hour to minute -- !query 42 schema -struct +struct<15 hours 40 
minutes:interval> -- !query 42 output -interval 15 hours 40 minutes +15 hours 40 minutes -- !query 43 select interval '15:40.99899999' hour to second -- !query 43 schema -struct +struct<15 minutes 40.998999 seconds:interval> -- !query 43 output -interval 15 minutes 40.998999 seconds +15 minutes 40.998999 seconds -- !query 44 select interval '15:40' hour to second -- !query 44 schema -struct +struct<15 hours 40 minutes:interval> -- !query 44 output -interval 15 hours 40 minutes +15 hours 40 minutes -- !query 45 select interval '15:40:32.99899999' hour to second -- !query 45 schema -struct +struct<15 hours 40 minutes 32.998999 seconds:interval> -- !query 45 output -interval 15 hours 40 minutes 32.998999 seconds +15 hours 40 minutes 32.998999 seconds -- !query 46 select interval '20 40:32.99899999' minute to second -- !query 46 schema -struct +struct<20 days 40 minutes 32.998999 seconds:interval> -- !query 46 output -interval 20 days 40 minutes 32.998999 seconds +20 days 40 minutes 32.998999 seconds -- !query 47 select interval '40:32.99899999' minute to second -- !query 47 schema -struct +struct<40 minutes 32.998999 seconds:interval> -- !query 47 output -interval 40 minutes 32.998999 seconds +40 minutes 32.998999 seconds -- !query 48 select interval '40:32' minute to second -- !query 48 schema -struct +struct<40 minutes 32 seconds:interval> -- !query 48 output -interval 40 minutes 32 seconds +40 minutes 32 seconds -- !query 49 @@ -523,25 +523,25 @@ struct<3.14:decimal(3,2),-3.14:decimal(3,2),3.14E+8:decimal(3,-6),3.14E-8:decima -- !query 56 select map(1, interval 1 day, 2, interval 3 week) -- !query 56 schema -struct> +struct> -- !query 56 output -{1:interval 1 days,2:interval 21 days} +{1:1 days,2:21 days} -- !query 57 select interval 'interval 3 year 1 hour' -- !query 57 schema -struct +struct<3 years 1 hours:interval> -- !query 57 output -interval 3 years 1 hours +3 years 1 hours -- !query 58 select interval '3 year 1 hour' -- !query 58 schema -struct +struct<3 years 1 hours:interval> -- !query 58 output -interval 3 years 1 hours +3 years 1 hours -- !query 59 diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out index bd4825003a..019068c9b4 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out @@ -5,190 +5,190 @@ -- !query 0 SELECT interval '999' second -- !query 0 schema -struct +struct<16 minutes 39 seconds:interval> -- !query 0 output -interval 16 minutes 39 seconds +16 minutes 39 seconds -- !query 1 SELECT interval '999' minute -- !query 1 schema -struct +struct<16 hours 39 minutes:interval> -- !query 1 output -interval 16 hours 39 minutes +16 hours 39 minutes -- !query 2 SELECT interval '999' hour -- !query 2 schema -struct +struct<999 hours:interval> -- !query 2 output -interval 999 hours +999 hours -- !query 3 SELECT interval '999' day -- !query 3 schema -struct +struct<999 days:interval> -- !query 3 output -interval 999 days +999 days -- !query 4 SELECT interval '999' month -- !query 4 schema -struct +struct<83 years 3 months:interval> -- !query 4 output -interval 83 years 3 months +83 years 3 months -- !query 5 SELECT interval '1' year -- !query 5 schema -struct +struct<1 years:interval> -- !query 5 output -interval 1 years +1 years -- !query 6 SELECT interval '2' month -- !query 6 schema -struct +struct<2 months:interval> -- !query 6 output -interval 2 months 
+2 months -- !query 7 SELECT interval '3' day -- !query 7 schema -struct +struct<3 days:interval> -- !query 7 output -interval 3 days +3 days -- !query 8 SELECT interval '4' hour -- !query 8 schema -struct +struct<4 hours:interval> -- !query 8 output -interval 4 hours +4 hours -- !query 9 SELECT interval '5' minute -- !query 9 schema -struct +struct<5 minutes:interval> -- !query 9 output -interval 5 minutes +5 minutes -- !query 10 SELECT interval '6' second -- !query 10 schema -struct +struct<6 seconds:interval> -- !query 10 output -interval 6 seconds +6 seconds -- !query 11 SELECT interval '1-2' year to month -- !query 11 schema -struct +struct<1 years 2 months:interval> -- !query 11 output -interval 1 years 2 months +1 years 2 months -- !query 12 SELECT interval '1 2:03' day to hour -- !query 12 schema -struct +struct<1 days 2 hours:interval> -- !query 12 output -interval 1 days 2 hours +1 days 2 hours -- !query 13 SELECT interval '1 2:03:04' day to hour -- !query 13 schema -struct +struct<1 days 2 hours:interval> -- !query 13 output -interval 1 days 2 hours +1 days 2 hours -- !query 14 SELECT interval '1 2:03' day to minute -- !query 14 schema -struct +struct<1 days 2 hours 3 minutes:interval> -- !query 14 output -interval 1 days 2 hours 3 minutes +1 days 2 hours 3 minutes -- !query 15 SELECT interval '1 2:03:04' day to minute -- !query 15 schema -struct +struct<1 days 2 hours 3 minutes:interval> -- !query 15 output -interval 1 days 2 hours 3 minutes +1 days 2 hours 3 minutes -- !query 16 SELECT interval '1 2:03' day to second -- !query 16 schema -struct +struct<1 days 2 hours 3 minutes:interval> -- !query 16 output -interval 1 days 2 hours 3 minutes +1 days 2 hours 3 minutes -- !query 17 SELECT interval '1 2:03:04' day to second -- !query 17 schema -struct +struct<1 days 2 hours 3 minutes 4 seconds:interval> -- !query 17 output -interval 1 days 2 hours 3 minutes 4 seconds +1 days 2 hours 3 minutes 4 seconds -- !query 18 SELECT interval '1 2:03' hour to minute -- !query 18 schema -struct +struct<1 days 2 hours 3 minutes:interval> -- !query 18 output -interval 1 days 2 hours 3 minutes +1 days 2 hours 3 minutes -- !query 19 SELECT interval '1 2:03:04' hour to minute -- !query 19 schema -struct +struct<1 days 2 hours 3 minutes:interval> -- !query 19 output -interval 1 days 2 hours 3 minutes +1 days 2 hours 3 minutes -- !query 20 SELECT interval '1 2:03' hour to second -- !query 20 schema -struct +struct<1 days 2 hours 3 minutes:interval> -- !query 20 output -interval 1 days 2 hours 3 minutes +1 days 2 hours 3 minutes -- !query 21 SELECT interval '1 2:03:04' hour to second -- !query 21 schema -struct +struct<1 days 2 hours 3 minutes 4 seconds:interval> -- !query 21 output -interval 1 days 2 hours 3 minutes 4 seconds +1 days 2 hours 3 minutes 4 seconds -- !query 22 SELECT interval '1 2:03' minute to second -- !query 22 schema -struct +struct<1 days 2 minutes 3 seconds:interval> -- !query 22 output -interval 1 days 2 minutes 3 seconds +1 days 2 minutes 3 seconds -- !query 23 SELECT interval '1 2:03:04' minute to second -- !query 23 schema -struct +struct<1 days 2 hours 3 minutes 4 seconds:interval> -- !query 23 output -interval 1 days 2 hours 3 minutes 4 seconds +1 days 2 hours 3 minutes 4 seconds diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out index a4cd408c04..462ad63aaa 100644 --- 
a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out @@ -16,7 +16,7 @@ select cast(1 as tinyint) + interval 2 day struct<> -- !query 1 output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS TINYINT) + interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) + interval 2 days)' (tinyint and interval).; line 1 pos 7 +cannot resolve '(CAST(1 AS TINYINT) + 2 days)' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) + 2 days)' (tinyint and interval).; line 1 pos 7 -- !query 2 @@ -25,7 +25,7 @@ select cast(1 as smallint) + interval 2 day struct<> -- !query 2 output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS SMALLINT) + interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) + interval 2 days)' (smallint and interval).; line 1 pos 7 +cannot resolve '(CAST(1 AS SMALLINT) + 2 days)' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) + 2 days)' (smallint and interval).; line 1 pos 7 -- !query 3 @@ -34,7 +34,7 @@ select cast(1 as int) + interval 2 day struct<> -- !query 3 output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS INT) + interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS INT) + interval 2 days)' (int and interval).; line 1 pos 7 +cannot resolve '(CAST(1 AS INT) + 2 days)' due to data type mismatch: differing types in '(CAST(1 AS INT) + 2 days)' (int and interval).; line 1 pos 7 -- !query 4 @@ -43,7 +43,7 @@ select cast(1 as bigint) + interval 2 day struct<> -- !query 4 output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BIGINT) + interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) + interval 2 days)' (bigint and interval).; line 1 pos 7 +cannot resolve '(CAST(1 AS BIGINT) + 2 days)' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) + 2 days)' (bigint and interval).; line 1 pos 7 -- !query 5 @@ -52,7 +52,7 @@ select cast(1 as float) + interval 2 day struct<> -- !query 5 output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS FLOAT) + interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) + interval 2 days)' (float and interval).; line 1 pos 7 +cannot resolve '(CAST(1 AS FLOAT) + 2 days)' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) + 2 days)' (float and interval).; line 1 pos 7 -- !query 6 @@ -61,7 +61,7 @@ select cast(1 as double) + interval 2 day struct<> -- !query 6 output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DOUBLE) + interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) + interval 2 days)' (double and interval).; line 1 pos 7 +cannot resolve '(CAST(1 AS DOUBLE) + 2 days)' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) + 2 days)' (double and interval).; line 1 pos 7 -- !query 7 @@ -70,13 +70,13 @@ select cast(1 as decimal(10, 0)) + interval 2 day struct<> -- !query 7 output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(10,0)) + interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + interval 2 days)' (decimal(10,0) and interval).; line 1 pos 7 +cannot resolve '(CAST(1 AS DECIMAL(10,0)) + 2 days)' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + 2 days)' (decimal(10,0) and 
interval).; line 1 pos 7 -- !query 8 select cast('2017-12-11' as string) + interval 2 day -- !query 8 schema -struct +struct -- !query 8 output 2017-12-13 00:00:00 @@ -84,7 +84,7 @@ struct +struct -- !query 9 output 2017-12-13 09:30:00 @@ -95,7 +95,7 @@ select cast('1' as binary) + interval 2 day struct<> -- !query 10 output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) + interval 2 days)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + interval 2 days)' (binary and interval).; line 1 pos 7 +cannot resolve '(CAST('1' AS BINARY) + 2 days)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + 2 days)' (binary and interval).; line 1 pos 7 -- !query 11 @@ -104,13 +104,13 @@ select cast(1 as boolean) + interval 2 day struct<> -- !query 11 output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BOOLEAN) + interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) + interval 2 days)' (boolean and interval).; line 1 pos 7 +cannot resolve '(CAST(1 AS BOOLEAN) + 2 days)' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) + 2 days)' (boolean and interval).; line 1 pos 7 -- !query 12 select cast('2017-12-11 09:30:00.0' as timestamp) + interval 2 day -- !query 12 schema -struct +struct -- !query 12 output 2017-12-13 09:30:00 @@ -118,7 +118,7 @@ struct +struct -- !query 13 output 2017-12-13 @@ -129,7 +129,7 @@ select interval 2 day + cast(1 as tinyint) struct<> -- !query 14 output org.apache.spark.sql.AnalysisException -cannot resolve '(interval 2 days + CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(interval 2 days + CAST(1 AS TINYINT))' (interval and tinyint).; line 1 pos 7 +cannot resolve '(2 days + CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(2 days + CAST(1 AS TINYINT))' (interval and tinyint).; line 1 pos 7 -- !query 15 @@ -138,7 +138,7 @@ select interval 2 day + cast(1 as smallint) struct<> -- !query 15 output org.apache.spark.sql.AnalysisException -cannot resolve '(interval 2 days + CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(interval 2 days + CAST(1 AS SMALLINT))' (interval and smallint).; line 1 pos 7 +cannot resolve '(2 days + CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(2 days + CAST(1 AS SMALLINT))' (interval and smallint).; line 1 pos 7 -- !query 16 @@ -147,7 +147,7 @@ select interval 2 day + cast(1 as int) struct<> -- !query 16 output org.apache.spark.sql.AnalysisException -cannot resolve '(interval 2 days + CAST(1 AS INT))' due to data type mismatch: differing types in '(interval 2 days + CAST(1 AS INT))' (interval and int).; line 1 pos 7 +cannot resolve '(2 days + CAST(1 AS INT))' due to data type mismatch: differing types in '(2 days + CAST(1 AS INT))' (interval and int).; line 1 pos 7 -- !query 17 @@ -156,7 +156,7 @@ select interval 2 day + cast(1 as bigint) struct<> -- !query 17 output org.apache.spark.sql.AnalysisException -cannot resolve '(interval 2 days + CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(interval 2 days + CAST(1 AS BIGINT))' (interval and bigint).; line 1 pos 7 +cannot resolve '(2 days + CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(2 days + CAST(1 AS BIGINT))' (interval and bigint).; line 1 pos 7 -- !query 18 @@ -165,7 +165,7 @@ select interval 2 day + cast(1 as float) struct<> -- !query 18 output org.apache.spark.sql.AnalysisException -cannot resolve '(interval 2 days + CAST(1 AS FLOAT))' due to 
data type mismatch: differing types in '(interval 2 days + CAST(1 AS FLOAT))' (interval and float).; line 1 pos 7 +cannot resolve '(2 days + CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(2 days + CAST(1 AS FLOAT))' (interval and float).; line 1 pos 7 -- !query 19 @@ -174,7 +174,7 @@ select interval 2 day + cast(1 as double) struct<> -- !query 19 output org.apache.spark.sql.AnalysisException -cannot resolve '(interval 2 days + CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(interval 2 days + CAST(1 AS DOUBLE))' (interval and double).; line 1 pos 7 +cannot resolve '(2 days + CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(2 days + CAST(1 AS DOUBLE))' (interval and double).; line 1 pos 7 -- !query 20 @@ -183,13 +183,13 @@ select interval 2 day + cast(1 as decimal(10, 0)) struct<> -- !query 20 output org.apache.spark.sql.AnalysisException -cannot resolve '(interval 2 days + CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(interval 2 days + CAST(1 AS DECIMAL(10,0)))' (interval and decimal(10,0)).; line 1 pos 7 +cannot resolve '(2 days + CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(2 days + CAST(1 AS DECIMAL(10,0)))' (interval and decimal(10,0)).; line 1 pos 7 -- !query 21 select interval 2 day + cast('2017-12-11' as string) -- !query 21 schema -struct +struct -- !query 21 output 2017-12-13 00:00:00 @@ -197,7 +197,7 @@ struct +struct -- !query 22 output 2017-12-13 09:30:00 @@ -208,7 +208,7 @@ select interval 2 day + cast('1' as binary) struct<> -- !query 23 output org.apache.spark.sql.AnalysisException -cannot resolve '(interval 2 days + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(interval 2 days + CAST('1' AS BINARY))' (interval and binary).; line 1 pos 7 +cannot resolve '(2 days + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(2 days + CAST('1' AS BINARY))' (interval and binary).; line 1 pos 7 -- !query 24 @@ -217,13 +217,13 @@ select interval 2 day + cast(1 as boolean) struct<> -- !query 24 output org.apache.spark.sql.AnalysisException -cannot resolve '(interval 2 days + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(interval 2 days + CAST(1 AS BOOLEAN))' (interval and boolean).; line 1 pos 7 +cannot resolve '(2 days + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(2 days + CAST(1 AS BOOLEAN))' (interval and boolean).; line 1 pos 7 -- !query 25 select interval 2 day + cast('2017-12-11 09:30:00.0' as timestamp) -- !query 25 schema -struct +struct -- !query 25 output 2017-12-13 09:30:00 @@ -231,7 +231,7 @@ struct +struct -- !query 26 output 2017-12-13 @@ -242,7 +242,7 @@ select cast(1 as tinyint) - interval 2 day struct<> -- !query 27 output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS TINYINT) - interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) - interval 2 days)' (tinyint and interval).; line 1 pos 7 +cannot resolve '(CAST(1 AS TINYINT) - 2 days)' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) - 2 days)' (tinyint and interval).; line 1 pos 7 -- !query 28 @@ -251,7 +251,7 @@ select cast(1 as smallint) - interval 2 day struct<> -- !query 28 output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS SMALLINT) - interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) - interval 2 days)' (smallint and interval).; line 1 pos 7 +cannot resolve '(CAST(1 AS SMALLINT) - 2 
days)' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) - 2 days)' (smallint and interval).; line 1 pos 7 -- !query 29 @@ -260,7 +260,7 @@ select cast(1 as int) - interval 2 day struct<> -- !query 29 output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS INT) - interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS INT) - interval 2 days)' (int and interval).; line 1 pos 7 +cannot resolve '(CAST(1 AS INT) - 2 days)' due to data type mismatch: differing types in '(CAST(1 AS INT) - 2 days)' (int and interval).; line 1 pos 7 -- !query 30 @@ -269,7 +269,7 @@ select cast(1 as bigint) - interval 2 day struct<> -- !query 30 output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BIGINT) - interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) - interval 2 days)' (bigint and interval).; line 1 pos 7 +cannot resolve '(CAST(1 AS BIGINT) - 2 days)' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) - 2 days)' (bigint and interval).; line 1 pos 7 -- !query 31 @@ -278,7 +278,7 @@ select cast(1 as float) - interval 2 day struct<> -- !query 31 output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS FLOAT) - interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) - interval 2 days)' (float and interval).; line 1 pos 7 +cannot resolve '(CAST(1 AS FLOAT) - 2 days)' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) - 2 days)' (float and interval).; line 1 pos 7 -- !query 32 @@ -287,7 +287,7 @@ select cast(1 as double) - interval 2 day struct<> -- !query 32 output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DOUBLE) - interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) - interval 2 days)' (double and interval).; line 1 pos 7 +cannot resolve '(CAST(1 AS DOUBLE) - 2 days)' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) - 2 days)' (double and interval).; line 1 pos 7 -- !query 33 @@ -296,13 +296,13 @@ select cast(1 as decimal(10, 0)) - interval 2 day struct<> -- !query 33 output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(10,0)) - interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) - interval 2 days)' (decimal(10,0) and interval).; line 1 pos 7 +cannot resolve '(CAST(1 AS DECIMAL(10,0)) - 2 days)' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) - 2 days)' (decimal(10,0) and interval).; line 1 pos 7 -- !query 34 select cast('2017-12-11' as string) - interval 2 day -- !query 34 schema -struct +struct -- !query 34 output 2017-12-09 00:00:00 @@ -310,7 +310,7 @@ struct +struct -- !query 35 output 2017-12-09 09:30:00 @@ -321,7 +321,7 @@ select cast('1' as binary) - interval 2 day struct<> -- !query 36 output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) - interval 2 days)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - interval 2 days)' (binary and interval).; line 1 pos 7 +cannot resolve '(CAST('1' AS BINARY) - 2 days)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - 2 days)' (binary and interval).; line 1 pos 7 -- !query 37 @@ -330,13 +330,13 @@ select cast(1 as boolean) - interval 2 day struct<> -- !query 37 output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BOOLEAN) - interval 2 days)' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) - interval 2 days)' 
(boolean and interval).; line 1 pos 7 +cannot resolve '(CAST(1 AS BOOLEAN) - 2 days)' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) - 2 days)' (boolean and interval).; line 1 pos 7 -- !query 38 select cast('2017-12-11 09:30:00.0' as timestamp) - interval 2 day -- !query 38 schema -struct +struct -- !query 38 output 2017-12-09 09:30:00 @@ -344,6 +344,6 @@ struct +struct -- !query 39 output 2017-12-09 diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out index a44e67dcd8..b5ce121d2b 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out @@ -154,7 +154,7 @@ SELECT val_timestamp, udf(cate), avg(val_timestamp) OVER(PARTITION BY udf(cate) RANGE BETWEEN CURRENT ROW AND interval 23 days 4 hours FOLLOWING) FROM testData ORDER BY udf(cate), val_timestamp -- !query 9 schema -struct +struct -- !query 9 output NULL NULL NULL 2017-07-31 17:00:00 NULL 1.5015456E9 diff --git a/sql/core/src/test/resources/sql-tests/results/window.sql.out b/sql/core/src/test/resources/sql-tests/results/window.sql.out index 74036d0782..1698ac081b 100644 --- a/sql/core/src/test/resources/sql-tests/results/window.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/window.sql.out @@ -154,7 +154,7 @@ SELECT val_timestamp, cate, avg(val_timestamp) OVER(PARTITION BY cate ORDER BY v RANGE BETWEEN CURRENT ROW AND interval 23 days 4 hours FOLLOWING) FROM testData ORDER BY cate, val_timestamp -- !query 9 schema -struct +struct -- !query 9 output NULL NULL NULL 2017-07-31 17:00:00 NULL 1.5015456E9 diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala index 2b7cc6f182..6ea37baeaf 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala @@ -293,10 +293,10 @@ class DateFunctionsSuite extends QueryTest with SharedSparkSession { val i = new CalendarInterval(2, 2, 2000000L) val df = Seq((1, t1, d1), (3, t2, d2)).toDF("n", "t", "d") checkAnswer( - df.selectExpr(s"d + $i"), + df.selectExpr(s"d + INTERVAL'$i'"), Seq(Row(Date.valueOf("2015-10-02")), Row(Date.valueOf("2016-03-02")))) checkAnswer( - df.selectExpr(s"t + $i"), + df.selectExpr(s"t + INTERVAL'$i'"), Seq(Row(Timestamp.valueOf("2015-10-03 00:00:01")), Row(Timestamp.valueOf("2016-03-02 00:00:02")))) } @@ -309,10 +309,10 @@ class DateFunctionsSuite extends QueryTest with SharedSparkSession { val i = new CalendarInterval(2, 2, 2000000L) val df = Seq((1, t1, d1), (3, t2, d2)).toDF("n", "t", "d") checkAnswer( - df.selectExpr(s"d - $i"), + df.selectExpr(s"d - INTERVAL'$i'"), Seq(Row(Date.valueOf("2015-07-27")), Row(Date.valueOf("2015-12-26")))) checkAnswer( - df.selectExpr(s"t - $i"), + df.selectExpr(s"t - INTERVAL'$i'"), Seq(Row(Timestamp.valueOf("2015-07-29 23:59:59")), Row(Timestamp.valueOf("2015-12-27 00:00:00")))) } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala index e55d2bbe00..c41ca92572 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala @@ -218,15 +218,15 @@ class JsonFunctionsSuite extends QueryTest with SharedSparkSession { test("to_json - key 
types of map don't matter") { // interval type is invalid for converting to JSON. However, the keys of a map are treated // as strings, so its type doesn't matter. - val df = Seq(Tuple1(Tuple1("interval -3 month 7 hours"))).toDF("a") + val df = Seq(Tuple1(Tuple1("-3 month 7 hours"))).toDF("a") .select(struct(map($"a._1".cast(CalendarIntervalType), lit("a")).as("col1")).as("c")) checkAnswer( df.select(to_json($"c")), - Row("""{"col1":{"interval -3 months 7 hours":"a"}}""") :: Nil) + Row("""{"col1":{"-3 months 7 hours":"a"}}""") :: Nil) } test("to_json unsupported type") { - val baseDf = Seq(Tuple1(Tuple1("interval -3 month 7 hours"))).toDF("a") + val baseDf = Seq(Tuple1(Tuple1("-3 month 7 hours"))).toDF("a") val df = baseDf.select(struct($"a._1".cast(CalendarIntervalType).as("a")).as("c")) val e = intercept[AnalysisException]{ // Unsupported type throws an exception diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala index 3c8d25d935..8754177f86 100644 --- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala +++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala @@ -675,7 +675,7 @@ class HiveThriftBinaryServerSuite extends HiveThriftJdbcTest { withJdbcStatement() { statement => val rs = statement.executeQuery("SELECT interval 3 months 1 hours") assert(rs.next()) - assert(rs.getString(1) === "interval 3 months 1 hours") + assert(rs.getString(1) === "3 months 1 hours") } // Invalid interval value withJdbcStatement() { statement => diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SparkThriftServerProtocolVersionsSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SparkThriftServerProtocolVersionsSuite.scala index 10ec1ee168..a63b5dac0a 100644 --- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SparkThriftServerProtocolVersionsSuite.scala +++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SparkThriftServerProtocolVersionsSuite.scala @@ -264,7 +264,7 @@ class SparkThriftServerProtocolVersionsSuite extends HiveThriftJdbcTest { test(s"$version get interval type") { testExecuteStatementWithProtocolVersion(version, "SELECT interval '1' year '2' day") { rs => assert(rs.next()) - assert(rs.getString(1) === "interval 1 years 2 days") + assert(rs.getString(1) === "1 years 2 days") } }
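As a closing illustration, here is a small standalone sketch, not part of the patch, that exercises the new `CalendarInterval.toString` directly. It reuses the values asserted in the updated `CastSuite` and `CalendarIntervalSuite` above and assumes the Spark `unsafe` module is on the classpath:

```scala
import org.apache.spark.unsafe.types.CalendarInterval

// Hypothetical check, not part of the patch: prints the new prefix-free form.
object IntervalToStringCheck {
  def main(args: Array[String]): Unit = {
    // 15 months, 9 days, -3 hours; 15 months is rendered as "1 years 3 months".
    val mixed = new CalendarInterval(15, 9, -3 * CalendarInterval.MICROS_PER_HOUR)
    println(mixed) // prints "1 years 3 months 9 days -3 hours"
    // The all-zero interval is now special-cased to "0 seconds"
    // (previously "interval 0 microseconds").
    println(new CalendarInterval(0, 0, 0)) // prints "0 seconds"
  }
}
```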