[SPARK-36409][SQL][TESTS] Splitting test cases from datetime.sql

### What changes were proposed in this pull request?

Currently `datetime.sql` contains a lot of tests and will be run 3 times: default mode, ansi mode, ntz mode. It wastes the test time and also large test files are hard to read.

This PR proposes to split it into smaller ones:
1. `date.sql`, which contains date literals, functions and operations. It will be run twice with default and ansi mode.
2. `timestamp.sql`, which contains timestamp (no ltz or ntz suffix) literals, functions and operations. It will be run 4 times: default mode + ansi off, default mode + ansi on, ntz mode + ansi off, ntz mode + ansi on.
3. `datetime-special.sql`, which creates datetime values whose year is outside of [0, 9999]. This is a separate file because JDBC doesn't support such values and this test file needs to be skipped there. It will be run 4 times as well.
4. `timestamp_ltz.sql`, which contains timestamp_ltz literals and constructors. It will be run twice with default and ntz mode, to make sure its result doesn't change with the timestamp mode. Note that, operations with ltz are tested by `timestamp.sql`
5. `timestamp_ntz.sql`, which contains timestamp_ntz literals and constructors. It will be run twice with default and ntz mode, to make sure its result doesn't change with the timestamp mode. Note that, operations with ntz are tested by `timestamp.sql`

### Why are the changes needed?

reduce test run time.

### Does this PR introduce _any_ user-facing change?

no

### How was this patch tested?

N/A

Closes #33640 from cloud-fan/test.

Authored-by: Wenchen Fan <wenchen@databricks.com>
Signed-off-by: Gengliang Wang <gengliang@apache.org>
This commit is contained in:
Wenchen Fan 2021-08-05 20:55:32 +08:00 committed by Gengliang Wang
parent 02810eecbf
commit 095f9ff75b
33 changed files with 5087 additions and 7044 deletions

View file

@ -0,0 +1 @@
--IMPORT date.sql

View file

@ -0,0 +1 @@
--IMPORT datetime-special.sql

View file

@ -1 +0,0 @@
--IMPORT datetime.sql

View file

@ -0,0 +1 @@
--IMPORT timestamp.sql

View file

@ -0,0 +1,111 @@
-- date literals, functions and operations
-- NOTE: this file is shared across several test configurations via --IMPORT,
-- so the exact query text below is pinned by the corresponding golden result files.

-- a trailing escape sequence in the literal is accepted and trimmed
select date '2019-01-01\t';
-- invalid: trailing non-date characters cause a parse error
select date '2020-01-01中文';
-- date construction from numeric year/month/day fields
select make_date(2019, 1, 1), make_date(12, 12, 12);
-- invalid month
select make_date(2000, 13, 1);
-- invalid day
select make_date(2000, 1, 33);
-- invalid: year field must have at least 4 digits
select date'015';
-- invalid: month field can have at most 2 digits
select date'2021-4294967297-11';
-- `current_date` is valid as a bare keyword expression
select current_date = current_date;
-- under ANSI mode, `current_date` can't be a function name.
select current_date() = current_date();
-- conversions between date and unix_date (number of days from epoch)
select DATE_FROM_UNIX_DATE(0), DATE_FROM_UNIX_DATE(1000), DATE_FROM_UNIX_DATE(null);
select UNIX_DATE(DATE('1970-01-01')), UNIX_DATE(DATE('2020-12-04')), UNIX_DATE(null);
-- string-to-date conversion, with and without an explicit pattern
select to_date(null), to_date('2016-12-31'), to_date('2016-12-31', 'yyyy-MM-dd');
-- missing fields in `to_date`
select to_date("16", "dd");
-- invalid: there is no 29 in February, 1970
select to_date("02-29", "MM-dd");
-- `dayofweek` accepts both date and timestamp ltz/ntz inputs.
select dayofweek('2007-02-03'), dayofweek('2009-07-30'), dayofweek('2017-05-27'), dayofweek(null),
dayofweek('1582-10-15 13:10:15'), dayofweek(timestamp_ltz'1582-10-15 13:10:15'), dayofweek(timestamp_ntz'1582-10-15 13:10:15');
-- `weekday` accepts both date and timestamp ltz/ntz inputs.
select weekday('2007-02-03'), weekday('2009-07-30'), weekday('2017-05-27'), weekday(null),
weekday('1582-10-15 13:10:15'), weekday(timestamp_ltz'1582-10-15 13:10:15'), weekday(timestamp_ntz'1582-10-15 13:10:15');
-- `year` accepts both date and timestamp ltz/ntz inputs.
select year('1500-01-01'), year('1582-10-15 13:10:15'), year(timestamp_ltz'1582-10-15 13:10:15'), year(timestamp_ntz'1582-10-15 13:10:15');
-- `month` accepts both date and timestamp ltz/ntz inputs.
select month('1500-01-01'), month('1582-10-15 13:10:15'), month(timestamp_ltz'1582-10-15 13:10:15'), month(timestamp_ntz'1582-10-15 13:10:15');
-- `dayOfYear` accepts both date and timestamp ltz/ntz inputs.
select dayOfYear('1500-01-01'), dayOfYear('1582-10-15 13:10:15'), dayOfYear(timestamp_ltz'1582-10-15 13:10:15'), dayOfYear(timestamp_ntz'1582-10-15 13:10:15');
-- next_day
select next_day("2015-07-23", "Mon");
-- invalid day-of-week name
select next_day("2015-07-23", "xx");
select next_day("2015-07-23 12:12:12", "Mon");
-- next_day does not accept timestamp ltz/ntz input
select next_day(timestamp_ltz"2015-07-23 12:12:12", "Mon");
select next_day(timestamp_ntz"2015-07-23 12:12:12", "Mon");
-- invalid date string input
select next_day("xx", "Mon");
-- null inputs
select next_day(null, "Mon");
select next_day(null, "xx");
-- date add
select date_add('2011-11-11', 1Y);
select date_add('2011-11-11', 1S);
select date_add('2011-11-11', 1);
-- invalid cases: the second parameter can only be byte/short/int
select date_add('2011-11-11', 1L);
select date_add('2011-11-11', 1.0);
select date_add('2011-11-11', 1E1);
-- the second parameter can be a string literal if it can be parsed to int
select date_add('2011-11-11', '1');
select date_add('2011-11-11', '1.2');
-- null input leads to null result.
select date_add(null, 1);
select date_add(date'2011-11-11', null);
-- `date_add` accepts both date and timestamp ltz/ntz inputs.
select date_add(date'2011-11-11', 1);
select date_add(timestamp_ltz'2011-11-11 12:12:12', 1), date_add(timestamp_ntz'2011-11-11 12:12:12', 1);
-- date sub
select date_sub(date'2011-11-11', 1);
-- the second parameter can be a string literal if it can be parsed to int
select date_sub(date'2011-11-11', '1');
select date_sub(date'2011-11-11', '1.2');
-- `date_sub` accepts both date and timestamp ltz/ntz inputs.
select date_sub(timestamp_ltz'2011-11-11 12:12:12', 1), date_sub(timestamp_ntz'2011-11-11 12:12:12', 1);
-- null input leads to null result.
select date_sub(null, 1);
select date_sub(date'2011-11-11', null);
-- date add/sub with non-literal string column
create temp view v as select '1' str;
select date_add('2011-11-11', str) from v;
select date_sub('2011-11-11', str) from v;
-- date add/sub operations
select date'2011-11-11' + 1E1;
select date'2011-11-11' + '1';
select null + date '2001-09-28';
select date '2001-09-28' + 7Y;
select 7S + date '2001-09-28';
select date '2001-10-01' - 7;
select date '2001-10-01' - '7';
select date '2001-09-28' + null;
select date '2001-09-28' - null;
-- string minus interval
select '2011-11-11' - interval '2' day;
-- subtract dates
select null - date '2019-10-06';
select date '2001-10-01' - date '2001-09-28';
-- Unsupported narrow text style
select to_date('26/October/2015', 'dd/MMMMM/yyyy');
select from_json('{"d":"26/October/2015"}', 'd Date', map('dateFormat', 'dd/MMMMM/yyyy'));
select from_csv('26/October/2015', 'd Date', map('dateFormat', 'dd/MMMMM/yyyy'));

View file

@ -1,2 +1,3 @@
--SET spark.sql.legacy.timeParserPolicy=LEGACY
--IMPORT datetime.sql
--IMPORT date.sql
--IMPORT timestamp.sql

View file

@ -26,4 +26,16 @@ select to_timestamp('2020-10-350', 'yyyy-MM-DDD');
select to_timestamp('2020-11-31-366', 'yyyy-MM-dd-DDD');
-- add a special case to test csv, because the legacy formatter it uses is lenient, and Spark should
-- throw SparkUpgradeException
select from_csv('2018-366', 'date Date', map('dateFormat', 'yyyy-DDD'))
select from_csv('2018-366', 'date Date', map('dateFormat', 'yyyy-DDD'));
-- Datetime type parse errors: inputs that don't match the given pattern
select to_date("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS");
select to_date("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS");
select to_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS");
select to_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS");
select unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS");
select unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS");
select to_unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS");
select to_unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS");
-- casts of unparseable strings
select cast("Unparseable" as timestamp);
select cast("Unparseable" as date);

View file

@ -0,0 +1,10 @@
-- special date and timestamp values that are not allowed in the SQL standard
-- these tests are put in this separate file because they don't work in a JDBC environment
-- both literal syntax and the make_date/make_timestamp constructors are covered

-- date with year outside [0000-9999]
select date'999999-03-18', date'-0001-1-28', date'0015';
select make_date(999999, 3, 18), make_date(-1, 1, 28);
-- timestamp with year outside [0000-9999]
select timestamp'-1969-12-31 16:00:00', timestamp'-0015-03-18 16:00:00', timestamp'-000001', timestamp'99999-03-18T12:03:17';
select make_timestamp(-1969, 12, 31, 16, 0, 0.0), make_timestamp(-15, 3, 18, 16, 0, 0.0), make_timestamp(99999, 3, 18, 12, 3, 17.0);

View file

@ -1,295 +0,0 @@
-- date time functions
-- [SPARK-31710] TIMESTAMP_SECONDS, TIMESTAMP_MILLISECONDS and TIMESTAMP_MICROSECONDS to timestamp transfer
select TIMESTAMP_SECONDS(1230219000),TIMESTAMP_SECONDS(-1230219000),TIMESTAMP_SECONDS(null);
select TIMESTAMP_SECONDS(1.23), TIMESTAMP_SECONDS(1.23d), TIMESTAMP_SECONDS(FLOAT(1.23));
select TIMESTAMP_MILLIS(1230219000123),TIMESTAMP_MILLIS(-1230219000123),TIMESTAMP_MILLIS(null);
select TIMESTAMP_MICROS(1230219000123123),TIMESTAMP_MICROS(-1230219000123123),TIMESTAMP_MICROS(null);
-- overflow exception
select TIMESTAMP_SECONDS(1230219000123123);
select TIMESTAMP_SECONDS(-1230219000123123);
select TIMESTAMP_MILLIS(92233720368547758);
select TIMESTAMP_MILLIS(-92233720368547758);
-- truncate exception
select TIMESTAMP_SECONDS(0.1234567);
-- truncation is OK for float/double
select TIMESTAMP_SECONDS(0.1234567d), TIMESTAMP_SECONDS(FLOAT(0.1234567));
-- UNIX_SECONDS, UNIX_MILLISECONDS and UNIX_MICROSECONDS
select UNIX_SECONDS(TIMESTAMP('2020-12-01 14:30:08Z')), UNIX_SECONDS(TIMESTAMP('2020-12-01 14:30:08.999999Z')), UNIX_SECONDS(null);
select UNIX_MILLIS(TIMESTAMP('2020-12-01 14:30:08Z')), UNIX_MILLIS(TIMESTAMP('2020-12-01 14:30:08.999999Z')), UNIX_MILLIS(null);
select UNIX_MICROS(TIMESTAMP('2020-12-01 14:30:08Z')), UNIX_MICROS(TIMESTAMP('2020-12-01 14:30:08.999999Z')), UNIX_MICROS(null);
-- DATE_FROM_UNIX_DATE
select DATE_FROM_UNIX_DATE(0), DATE_FROM_UNIX_DATE(1000), DATE_FROM_UNIX_DATE(null);
-- UNIX_DATE
select UNIX_DATE(DATE('1970-01-01')), UNIX_DATE(DATE('2020-12-04')), UNIX_DATE(null);
-- [SPARK-16836] current_date and current_timestamp literals
select current_date = current_date(), current_timestamp = current_timestamp();
select localtimestamp() = localtimestamp();
select to_date(null), to_date('2016-12-31'), to_date('2016-12-31', 'yyyy-MM-dd');
select to_timestamp(null), to_timestamp('2016-12-31 00:12:00'), to_timestamp('2016-12-31', 'yyyy-MM-dd');
select to_timestamp_ntz(null), to_timestamp_ntz('2016-12-31 00:12:00'), to_timestamp_ntz('2016-12-31', 'yyyy-MM-dd');
select to_timestamp_ntz(to_date(null)), to_timestamp_ntz(to_date('2016-12-31')), to_timestamp_ntz(to_date('2016-12-31', 'yyyy-MM-dd'));
select to_timestamp_ntz(to_timestamp(null)), to_timestamp_ntz(to_timestamp('2016-12-31 00:12:00')), to_timestamp_ntz(to_timestamp('2016-12-31', 'yyyy-MM-dd'));
select to_timestamp_ltz(null), to_timestamp_ltz('2016-12-31 00:12:00'), to_timestamp_ltz('2016-12-31', 'yyyy-MM-dd');
select to_timestamp_ltz(to_date(null)), to_timestamp_ltz(to_date('2016-12-31')), to_timestamp_ltz(to_date('2016-12-31', 'yyyy-MM-dd'));
select to_timestamp_ltz(to_timestamp(null)), to_timestamp_ltz(to_timestamp('2016-12-31 00:12:00')), to_timestamp_ltz(to_timestamp('2016-12-31', 'yyyy-MM-dd'));
select dayofweek('2007-02-03'), dayofweek('2009-07-30'), dayofweek('2017-05-27'), dayofweek(null), dayofweek('1582-10-15 13:10:15');
-- [SPARK-22333]: timeFunctionCall has conflicts with columnReference
create temporary view ttf1 as select * from values
(1, 2),
(2, 3)
as ttf1(current_date, current_timestamp);
select current_date, current_timestamp from ttf1;
create temporary view ttf2 as select * from values
(1, 2),
(2, 3)
as ttf2(a, b);
select current_date = current_date(), current_timestamp = current_timestamp(), a, b from ttf2;
select a, b from ttf2 order by a, current_date;
select weekday('2007-02-03'), weekday('2009-07-30'), weekday('2017-05-27'), weekday(null), weekday('1582-10-15 13:10:15');
select year('1500-01-01'), month('1500-01-01'), dayOfYear('1500-01-01');
select date '2019-01-01\t';
select timestamp '2019-01-01\t';
select date '2020-01-01中文';
select timestamp '2019-01-01中文';
-- time add/sub
select timestamp'2011-11-11 11:11:11' + interval '2' day;
select timestamp'2011-11-11 11:11:11' - interval '2' day;
select date'2011-11-11 11:11:11' + interval '2' second;
select date'2011-11-11 11:11:11' - interval '2' second;
select '2011-11-11' - interval '2' day;
select '2011-11-11 11:11:11' - interval '2' second;
select '1' - interval '2' second;
select 1 - interval '2' second;
-- subtract timestamps
select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678';
select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01';
select timestamp'2019-10-06 10:11:12.345678' - null;
select null - timestamp'2019-10-06 10:11:12.345678';
-- subtract timestamps without time zone
select date'2020-01-01' - to_timestamp_ntz('2019-10-06 10:11:12.345678');
select to_timestamp_ntz('2019-10-06 10:11:12.345678') - date'2020-01-01';
select to_timestamp_ntz('2019-10-06 10:11:12.345678') - null;
select null - to_timestamp_ntz('2019-10-06 10:11:12.345678');
select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp_ntz('2019-10-06 10:11:12.345677');
select to_timestamp_ntz('2019-10-06 10:11:12.345677') - to_timestamp_ntz('2019-10-07 10:11:12.345678');
select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp('2019-10-06 10:11:12.345678');
select to_timestamp('2019-10-06 10:11:12.345678') - to_timestamp_ntz('2019-10-07 10:11:12.345678');
-- date add/sub
select date_add('2011-11-11', 1Y);
select date_add('2011-11-11', 1S);
select date_add('2011-11-11', 1);
select date_add('2011-11-11', 1L);
select date_add('2011-11-11', 1.0);
select date_add('2011-11-11', 1E1);
select date_add('2011-11-11', '1');
select date_add('2011-11-11', '1.2');
select date_add(date'2011-11-11', 1);
select date_add(timestamp'2011-11-11', 1);
select date_sub(date'2011-11-11', 1);
select date_sub(date'2011-11-11', '1');
select date_sub(date'2011-11-11', '1.2');
select date_sub(timestamp'2011-11-11', 1);
select date_sub(null, 1);
select date_sub(date'2011-11-11', null);
select date'2011-11-11' + 1E1;
select date'2011-11-11' + '1';
select null + date '2001-09-28';
select date '2001-09-28' + 7Y;
select 7S + date '2001-09-28';
select date '2001-10-01' - 7;
select date '2001-10-01' - '7';
select date '2001-09-28' + null;
select date '2001-09-28' - null;
-- date add/sub with non-literal string column
create temp view v as select '1' str;
select date_add('2011-11-11', str) from v;
select date_sub('2011-11-11', str) from v;
-- subtract dates
select null - date '2019-10-06';
select date '2001-10-01' - date '2001-09-28';
-- variable-length second fraction tests
select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp('2019-10-06 10:11:12.0', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp('2019-10-06 10:11:12.1', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
-- second fraction exceeded max variable length
select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
-- special cases
select to_timestamp('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.[SSSSSS]');
select to_timestamp('2019-10-06 10:11:12.123', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]');
select to_timestamp('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]');
select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]');
select to_timestamp('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]');
select to_timestamp("2019-10-06S10:11:12.12345", "yyyy-MM-dd'S'HH:mm:ss.SSSSSS");
select to_timestamp("12.12342019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm");
select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm");
select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm");
select to_timestamp("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm");
select to_timestamp("2019-10-06S", "yyyy-MM-dd'S'");
select to_timestamp("S2019-10-06", "'S'yyyy-MM-dd");
select to_timestamp("2019-10-06T10:11:12'12", "yyyy-MM-dd'T'HH:mm:ss''SSSS"); -- middle
select to_timestamp("2019-10-06T10:11:12'", "yyyy-MM-dd'T'HH:mm:ss''"); -- tail
select to_timestamp("'2019-10-06T10:11:12", "''yyyy-MM-dd'T'HH:mm:ss"); -- head
select to_timestamp("P2019-10-06T10:11:12", "'P'yyyy-MM-dd'T'HH:mm:ss"); -- head but as single quote
-- variable-length second fraction tests
select to_timestamp_ntz('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp_ntz('2019-10-06 10:11:12.0', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp_ntz('2019-10-06 10:11:12.1', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp_ntz('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp_ntz('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp_ntz('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp_ntz('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp_ntz('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
-- second fraction exceeded max variable length
select to_timestamp_ntz('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
-- special cases
select to_timestamp_ntz('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp_ntz('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp_ntz('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.[SSSSSS]');
select to_timestamp_ntz('2019-10-06 10:11:12.123', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]');
select to_timestamp_ntz('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]');
select to_timestamp_ntz('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]');
select to_timestamp_ntz('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]');
select to_timestamp_ntz("2019-10-06S10:11:12.12345", "yyyy-MM-dd'S'HH:mm:ss.SSSSSS");
select to_timestamp_ntz("12.12342019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm");
select to_timestamp_ntz("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm");
select to_timestamp_ntz("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm");
select to_timestamp_ntz("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm");
select to_timestamp_ntz("2019-10-06S", "yyyy-MM-dd'S'");
select to_timestamp_ntz("S2019-10-06", "'S'yyyy-MM-dd");
select to_timestamp_ntz("2019-10-06T10:11:12'12", "yyyy-MM-dd'T'HH:mm:ss''SSSS"); -- middle
select to_timestamp_ntz("2019-10-06T10:11:12'", "yyyy-MM-dd'T'HH:mm:ss''"); -- tail
select to_timestamp_ntz("'2019-10-06T10:11:12", "''yyyy-MM-dd'T'HH:mm:ss"); -- head
select to_timestamp_ntz("P2019-10-06T10:11:12", "'P'yyyy-MM-dd'T'HH:mm:ss"); -- head but as single quote
-- missing fields
select to_timestamp("16", "dd");
select to_timestamp("02-29", "MM-dd");
select to_timestamp_ntz("16", "dd");
select to_timestamp_ntz("02-29", "MM-dd");
select to_date("16", "dd");
select to_date("02-29", "MM-dd");
select to_timestamp("2019 40", "yyyy mm");
select to_timestamp("2019 10:10:10", "yyyy hh:mm:ss");
select to_timestamp_ntz("2019 40", "yyyy mm");
select to_timestamp_ntz("2019 10:10:10", "yyyy hh:mm:ss");
-- Unsupported narrow text style
select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG');
select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE');
select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE');
select to_timestamp_ntz('2019-10-06 A', 'yyyy-MM-dd GGGGG');
select to_timestamp_ntz('22 05 2020 Friday', 'dd MM yyyy EEEEEE');
select to_timestamp_ntz('22 05 2020 Friday', 'dd MM yyyy EEEEE');
select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE');
select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat', 'dd/MMMMM/yyyy'));
select from_json('{"d":"26/October/2015"}', 'd Date', map('dateFormat', 'dd/MMMMM/yyyy'));
select from_csv('26/October/2015', 't Timestamp', map('timestampFormat', 'dd/MMMMM/yyyy'));
select from_csv('26/October/2015', 'd Date', map('dateFormat', 'dd/MMMMM/yyyy'));
-- Datetime types parse error
select to_date("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS");
select to_date("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS");
select to_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS");
select to_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS");
select to_timestamp_ntz("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS");
select to_timestamp_ntz("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS");
select unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS");
select unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS");
select to_unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS");
select to_unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS");
select cast("Unparseable" as timestamp);
select cast("Unparseable" as date);
-- next_day
select next_day("2015-07-23", "Mon");
select next_day("2015-07-23", "xx");
select next_day("xx", "Mon");
select next_day(null, "Mon");
select next_day(null, "xx");
-- TimestampNTZ + Intervals
select to_timestamp_ntz('2021-06-25 10:11:12') + interval 2 day;
select to_timestamp_ntz('2021-06-25 10:11:12') + interval '0-0' year to month;
select to_timestamp_ntz('2021-06-25 10:11:12') + interval '1-2' year to month;
select to_timestamp_ntz('2021-06-25 10:11:12') + interval '0 0:0:0' day to second;
select to_timestamp_ntz('2021-06-25 10:11:12') + interval '0 0:0:0.1' day to second;
select to_timestamp_ntz('2021-06-25 10:11:12') + interval '10-9' year to month;
select to_timestamp_ntz('2021-06-25 10:11:12') + interval '20 15' day to hour;
select to_timestamp_ntz('2021-06-25 10:11:12') + interval '20 15:40' day to minute;
select to_timestamp_ntz('2021-06-25 10:11:12') + interval '20 15:40:32.99899999' day to second;
-- TimestampNTZ - Intervals
select to_timestamp_ntz('2021-06-25 10:11:12') - interval 2 day;
select to_timestamp_ntz('2021-06-25 10:11:12') - interval '0-0' year to month;
select to_timestamp_ntz('2021-06-25 10:11:12') - interval '1-2' year to month;
select to_timestamp_ntz('2021-06-25 10:11:12') - interval '0 0:0:0' day to second;
select to_timestamp_ntz('2021-06-25 10:11:12') - interval '0 0:0:0.1' day to second;
select to_timestamp_ntz('2021-06-25 10:11:12') - interval '10-9' year to month;
select to_timestamp_ntz('2021-06-25 10:11:12') - interval '20 15' day to hour;
select to_timestamp_ntz('2021-06-25 10:11:12') - interval '20 15:40' day to minute;
select to_timestamp_ntz('2021-06-25 10:11:12') - interval '20 15:40:32.99899999' day to second;
-- timestamp numeric fields constructor
SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678);
SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678, 'CET');
SELECT make_timestamp(2021, 07, 11, 6, 30, 60.007);
-- TimestampNTZ numeric fields constructor
SELECT make_timestamp_ntz(2021, 07, 11, 6, 30, 45.678);
-- make_timestamp_ntz should not accept time zone input
SELECT make_timestamp_ntz(2021, 07, 11, 6, 30, 45.678, 'CET');
SELECT make_timestamp_ntz(2021, 07, 11, 6, 30, 60.007);
-- TimestampLTZ numeric fields constructor
SELECT make_timestamp_ltz(2021, 07, 11, 6, 30, 45.678);
SELECT make_timestamp_ltz(2021, 07, 11, 6, 30, 45.678, 'CET');
SELECT make_timestamp_ltz(2021, 07, 11, 6, 30, 60.007);
-- datetime with year outside [0000-9999]
select date'999999-03-18';
select date'-0001-1-28';
select date'0015';
select cast('015' as date);
select cast('2021-4294967297-11' as date);
select timestamp'-1969-12-31 16:00:00';
select timestamp'0015-03-18 16:00:00';
select timestamp'-000001';
select timestamp'99999-03-18T12:03:17';
select cast('4294967297' as timestamp);
select cast('2021-01-01T12:30:4294967297.123456' as timestamp);

View file

@ -0,0 +1,16 @@
-- timestamp_ltz literals and constructors
-- The CONFIG_DIM directives below run this file once per default timestamp type,
-- to make sure the results don't change with the spark.sql.timestampType setting.
--CONFIG_DIM1 spark.sql.timestampType=TIMESTAMP_LTZ
--CONFIG_DIM1 spark.sql.timestampType=TIMESTAMP_NTZ
select timestamp_ltz'2016-12-31 00:12:00', timestamp_ltz'2016-12-31';
select to_timestamp_ltz(null), to_timestamp_ltz('2016-12-31 00:12:00'), to_timestamp_ltz('2016-12-31', 'yyyy-MM-dd');
-- `to_timestamp_ltz` can also take date input
select to_timestamp_ltz(to_date(null)), to_timestamp_ltz(to_date('2016-12-31'));
-- `to_timestamp_ltz` can also take timestamp_ntz input
select to_timestamp_ltz(to_timestamp_ntz(null)), to_timestamp_ltz(to_timestamp_ntz('2016-12-31 00:12:00'));
-- TimestampLTZ numeric fields constructor
SELECT make_timestamp_ltz(2021, 07, 11, 6, 30, 45.678);
SELECT make_timestamp_ltz(2021, 07, 11, 6, 30, 45.678, 'CET');
SELECT make_timestamp_ltz(2021, 07, 11, 6, 30, 60.007);

View file

@ -0,0 +1,17 @@
-- timestamp_ntz literals and constructors
-- The CONFIG_DIM directives below run this file once per default timestamp type,
-- to make sure the results don't change with the spark.sql.timestampType setting.
--CONFIG_DIM1 spark.sql.timestampType=TIMESTAMP_LTZ
--CONFIG_DIM1 spark.sql.timestampType=TIMESTAMP_NTZ
select timestamp_ntz'2016-12-31 00:12:00', timestamp_ntz'2016-12-31';
select to_timestamp_ntz(null), to_timestamp_ntz('2016-12-31 00:12:00'), to_timestamp_ntz('2016-12-31', 'yyyy-MM-dd');
-- `to_timestamp_ntz` can also take date input
select to_timestamp_ntz(to_date(null)), to_timestamp_ntz(to_date('2016-12-31'));
-- `to_timestamp_ntz` can also take timestamp_ltz input
select to_timestamp_ntz(to_timestamp_ltz(null)), to_timestamp_ntz(to_timestamp_ltz('2016-12-31 00:12:00'));
-- TimestampNTZ numeric fields constructor
SELECT make_timestamp_ntz(2021, 07, 11, 6, 30, 45.678);
-- make_timestamp_ntz should not accept time zone input
SELECT make_timestamp_ntz(2021, 07, 11, 6, 30, 45.678, 'CET');
SELECT make_timestamp_ntz(2021, 07, 11, 6, 30, 60.007);

View file

@ -0,0 +1,117 @@
-- timestamp literals, functions and operations
-- NOTE: this file is shared across several test configurations via --IMPORT,
-- so the exact query text below is pinned by the corresponding golden result files.

-- a trailing escape sequence in the literal is accepted
select timestamp '2019-01-01\t';
select timestamp '2019-01-01中文';
-- invalid: year too large
select timestamp'4294967297';
-- invalid: minute field can have at most 2 digits
select timestamp'2021-01-01T12:30:4294967297.123456';
-- `current_timestamp` is valid as a bare keyword expression
select current_timestamp = current_timestamp;
-- under ANSI mode, `current_timestamp` can't be a function name.
select current_timestamp() = current_timestamp();
select localtimestamp() = localtimestamp();
-- timestamp numeric fields constructor
SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678);
SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678, 'CET');
SELECT make_timestamp(2021, 07, 11, 6, 30, 60.007);
-- [SPARK-31710] TIMESTAMP_SECONDS, TIMESTAMP_MILLISECONDS and TIMESTAMP_MICROSECONDS that always create timestamp_ltz
select TIMESTAMP_SECONDS(1230219000),TIMESTAMP_SECONDS(-1230219000),TIMESTAMP_SECONDS(null);
select TIMESTAMP_SECONDS(1.23), TIMESTAMP_SECONDS(1.23d), TIMESTAMP_SECONDS(FLOAT(1.23));
select TIMESTAMP_MILLIS(1230219000123),TIMESTAMP_MILLIS(-1230219000123),TIMESTAMP_MILLIS(null);
select TIMESTAMP_MICROS(1230219000123123),TIMESTAMP_MICROS(-1230219000123123),TIMESTAMP_MICROS(null);
-- overflow exception
select TIMESTAMP_SECONDS(1230219000123123);
select TIMESTAMP_SECONDS(-1230219000123123);
select TIMESTAMP_MILLIS(92233720368547758);
select TIMESTAMP_MILLIS(-92233720368547758);
-- truncate exception
select TIMESTAMP_SECONDS(0.1234567);
-- truncation is OK for float/double
select TIMESTAMP_SECONDS(0.1234567d), TIMESTAMP_SECONDS(FLOAT(0.1234567));
-- [SPARK-22333]: timeFunctionCall has conflicts with columnReference
create temporary view ttf1 as select * from values
(1, 2),
(2, 3)
as ttf1(`current_date`, `current_timestamp`);
select typeof(current_date), typeof(current_timestamp) from ttf1;
create temporary view ttf2 as select * from values
(1, 2),
(2, 3)
as ttf2(a, b);
select current_date = current_date(), current_timestamp = current_timestamp(), a, b from ttf2;
select a, b from ttf2 order by a, current_date;
-- UNIX_SECONDS, UNIX_MILLISECONDS and UNIX_MICROSECONDS
select UNIX_SECONDS(timestamp'2020-12-01 14:30:08Z'), UNIX_SECONDS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_SECONDS(null);
select UNIX_MILLIS(timestamp'2020-12-01 14:30:08Z'), UNIX_MILLIS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_MILLIS(null);
select UNIX_MICROS(timestamp'2020-12-01 14:30:08Z'), UNIX_MICROS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_MICROS(null);
-- string-to-timestamp conversion, with and without an explicit pattern
select to_timestamp(null), to_timestamp('2016-12-31 00:12:00'), to_timestamp('2016-12-31', 'yyyy-MM-dd');
-- variable-length second fraction tests
select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp('2019-10-06 10:11:12.0', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp('2019-10-06 10:11:12.1', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
-- second fraction exceeded max variable length
select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
-- special cases
select to_timestamp('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.[SSSSSS]');
select to_timestamp('2019-10-06 10:11:12.123', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]');
select to_timestamp('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]');
select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]');
select to_timestamp('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]');
select to_timestamp("2019-10-06S10:11:12.12345", "yyyy-MM-dd'S'HH:mm:ss.SSSSSS");
select to_timestamp("12.12342019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm");
select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm");
select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm");
select to_timestamp("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm");
select to_timestamp("2019-10-06S", "yyyy-MM-dd'S'");
select to_timestamp("S2019-10-06", "'S'yyyy-MM-dd");
select to_timestamp("2019-10-06T10:11:12'12", "yyyy-MM-dd'T'HH:mm:ss''SSSS"); -- middle
select to_timestamp("2019-10-06T10:11:12'", "yyyy-MM-dd'T'HH:mm:ss''"); -- tail
select to_timestamp("'2019-10-06T10:11:12", "''yyyy-MM-dd'T'HH:mm:ss"); -- head
select to_timestamp("P2019-10-06T10:11:12", "'P'yyyy-MM-dd'T'HH:mm:ss"); -- head but as single quote
-- missing fields
select to_timestamp("16", "dd");
select to_timestamp("02-29", "MM-dd");
select to_timestamp("2019 40", "yyyy mm");
select to_timestamp("2019 10:10:10", "yyyy hh:mm:ss");
-- timestamp add/sub operations
select timestamp'2011-11-11 11:11:11' + interval '2' day;
select timestamp'2011-11-11 11:11:11' - interval '2' day;
select timestamp'2011-11-11 11:11:11' + interval '2' second;
select timestamp'2011-11-11 11:11:11' - interval '2' second;
select '2011-11-11 11:11:11' - interval '2' second;
select '1' - interval '2' second;
select 1 - interval '2' second;
-- analyzer will cast date to timestamp automatically
select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678';
select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01';
select timestamp'2019-10-06 10:11:12.345678' - null;
select null - timestamp'2019-10-06 10:11:12.345678';
-- Unsupported narrow text style
select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG');
select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE');
select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE');
select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE');
select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat', 'dd/MMMMM/yyyy'));
select from_csv('26/October/2015', 't Timestamp', map('timestampFormat', 'dd/MMMMM/yyyy'));

View file

@ -0,0 +1 @@
--IMPORT datetime-special.sql

View file

@ -1 +0,0 @@
--IMPORT datetime.sql

View file

@ -0,0 +1,2 @@
--SET spark.sql.ansi.enabled = true
--IMPORT timestamp.sql

View file

@ -0,0 +1 @@
--IMPORT timestamp.sql

View file

@ -0,0 +1,554 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 63
-- !query
select date '2019-01-01\t'
-- !query schema
struct<DATE '2019-01-01':date>
-- !query output
2019-01-01
-- !query
select date '2020-01-01中文'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
Cannot parse the DATE value: 2020-01-01中文(line 1, pos 7)
== SQL ==
select date '2020-01-01中文'
-------^^^
-- !query
select make_date(2019, 1, 1), make_date(12, 12, 12)
-- !query schema
struct<make_date(2019, 1, 1):date,make_date(12, 12, 12):date>
-- !query output
2019-01-01 0012-12-12
-- !query
select make_date(2000, 13, 1)
-- !query schema
struct<>
-- !query output
java.time.DateTimeException
Invalid value for MonthOfYear (valid values 1 - 12): 13
-- !query
select make_date(2000, 1, 33)
-- !query schema
struct<>
-- !query output
java.time.DateTimeException
Invalid value for DayOfMonth (valid values 1 - 28/31): 33
-- !query
select date'015'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
Cannot parse the DATE value: 015(line 1, pos 7)
== SQL ==
select date'015'
-------^^^
-- !query
select date'2021-4294967297-11'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
Cannot parse the DATE value: 2021-4294967297-11(line 1, pos 7)
== SQL ==
select date'2021-4294967297-11'
-------^^^
-- !query
select current_date = current_date
-- !query schema
struct<(current_date() = current_date()):boolean>
-- !query output
true
-- !query
select current_date() = current_date()
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
no viable alternative at input 'current_date'(line 1, pos 7)
== SQL ==
select current_date() = current_date()
-------^^^
-- !query
select DATE_FROM_UNIX_DATE(0), DATE_FROM_UNIX_DATE(1000), DATE_FROM_UNIX_DATE(null)
-- !query schema
struct<date_from_unix_date(0):date,date_from_unix_date(1000):date,date_from_unix_date(NULL):date>
-- !query output
1970-01-01 1972-09-27 NULL
-- !query
select UNIX_DATE(DATE('1970-01-01')), UNIX_DATE(DATE('2020-12-04')), UNIX_DATE(null)
-- !query schema
struct<unix_date(1970-01-01):int,unix_date(2020-12-04):int,unix_date(NULL):int>
-- !query output
0 18600 NULL
-- !query
select to_date(null), to_date('2016-12-31'), to_date('2016-12-31', 'yyyy-MM-dd')
-- !query schema
struct<to_date(NULL):date,to_date(2016-12-31):date,to_date(2016-12-31, yyyy-MM-dd):date>
-- !query output
NULL 2016-12-31 2016-12-31
-- !query
select to_date("16", "dd")
-- !query schema
struct<to_date(16, dd):date>
-- !query output
1970-01-16
-- !query
select to_date("02-29", "MM-dd")
-- !query schema
struct<>
-- !query output
java.time.DateTimeException
Invalid date 'February 29' as '1970' is not a leap year
-- !query
select dayofweek('2007-02-03'), dayofweek('2009-07-30'), dayofweek('2017-05-27'), dayofweek(null),
dayofweek('1582-10-15 13:10:15'), dayofweek(timestamp_ltz'1582-10-15 13:10:15'), dayofweek(timestamp_ntz'1582-10-15 13:10:15')
-- !query schema
struct<dayofweek(2007-02-03):int,dayofweek(2009-07-30):int,dayofweek(2017-05-27):int,dayofweek(NULL):int,dayofweek(1582-10-15 13:10:15):int,dayofweek(TIMESTAMP '1582-10-15 13:10:15'):int,dayofweek(TIMESTAMP_NTZ '1582-10-15 13:10:15'):int>
-- !query output
7 5 7 NULL 6 6 6
-- !query
select weekday('2007-02-03'), weekday('2009-07-30'), weekday('2017-05-27'), weekday(null),
weekday('1582-10-15 13:10:15'), weekday(timestamp_ltz'1582-10-15 13:10:15'), weekday(timestamp_ntz'1582-10-15 13:10:15')
-- !query schema
struct<weekday(2007-02-03):int,weekday(2009-07-30):int,weekday(2017-05-27):int,weekday(NULL):int,weekday(1582-10-15 13:10:15):int,weekday(TIMESTAMP '1582-10-15 13:10:15'):int,weekday(TIMESTAMP_NTZ '1582-10-15 13:10:15'):int>
-- !query output
5 3 5 NULL 4 4 4
-- !query
select year('1500-01-01'), year('1582-10-15 13:10:15'), year(timestamp_ltz'1582-10-15 13:10:15'), year(timestamp_ntz'1582-10-15 13:10:15')
-- !query schema
struct<year(1500-01-01):int,year(1582-10-15 13:10:15):int,year(TIMESTAMP '1582-10-15 13:10:15'):int,year(TIMESTAMP_NTZ '1582-10-15 13:10:15'):int>
-- !query output
1500 1582 1582 1582
-- !query
select month('1500-01-01'), month('1582-10-15 13:10:15'), month(timestamp_ltz'1582-10-15 13:10:15'), month(timestamp_ntz'1582-10-15 13:10:15')
-- !query schema
struct<month(1500-01-01):int,month(1582-10-15 13:10:15):int,month(TIMESTAMP '1582-10-15 13:10:15'):int,month(TIMESTAMP_NTZ '1582-10-15 13:10:15'):int>
-- !query output
1 10 10 10
-- !query
select dayOfYear('1500-01-01'), dayOfYear('1582-10-15 13:10:15'), dayOfYear(timestamp_ltz'1582-10-15 13:10:15'), dayOfYear(timestamp_ntz'1582-10-15 13:10:15')
-- !query schema
struct<dayofyear(1500-01-01):int,dayofyear(1582-10-15 13:10:15):int,dayofyear(TIMESTAMP '1582-10-15 13:10:15'):int,dayofyear(TIMESTAMP_NTZ '1582-10-15 13:10:15'):int>
-- !query output
1 288 288 288
-- !query
select next_day("2015-07-23", "Mon")
-- !query schema
struct<next_day(2015-07-23, Mon):date>
-- !query output
2015-07-27
-- !query
select next_day("2015-07-23", "xx")
-- !query schema
struct<>
-- !query output
java.lang.IllegalArgumentException
Illegal input for day of week: xx
-- !query
select next_day("2015-07-23 12:12:12", "Mon")
-- !query schema
struct<next_day(2015-07-23 12:12:12, Mon):date>
-- !query output
2015-07-27
-- !query
select next_day(timestamp_ltz"2015-07-23 12:12:12", "Mon")
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'next_day(TIMESTAMP '2015-07-23 12:12:12', 'Mon')' due to data type mismatch: argument 1 requires date type, however, 'TIMESTAMP '2015-07-23 12:12:12'' is of timestamp type.; line 1 pos 7
-- !query
select next_day(timestamp_ntz"2015-07-23 12:12:12", "Mon")
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'next_day(TIMESTAMP_NTZ '2015-07-23 12:12:12', 'Mon')' due to data type mismatch: argument 1 requires date type, however, 'TIMESTAMP_NTZ '2015-07-23 12:12:12'' is of timestamp_ntz type.; line 1 pos 7
-- !query
select next_day("xx", "Mon")
-- !query schema
struct<>
-- !query output
java.time.DateTimeException
Cannot cast xx to DateType.
-- !query
select next_day(null, "Mon")
-- !query schema
struct<next_day(NULL, Mon):date>
-- !query output
NULL
-- !query
select next_day(null, "xx")
-- !query schema
struct<next_day(NULL, xx):date>
-- !query output
NULL
-- !query
select date_add('2011-11-11', 1Y)
-- !query schema
struct<date_add(2011-11-11, 1):date>
-- !query output
2011-11-12
-- !query
select date_add('2011-11-11', 1S)
-- !query schema
struct<date_add(2011-11-11, 1):date>
-- !query output
2011-11-12
-- !query
select date_add('2011-11-11', 1)
-- !query schema
struct<date_add(2011-11-11, 1):date>
-- !query output
2011-11-12
-- !query
select date_add('2011-11-11', 1L)
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7
-- !query
select date_add('2011-11-11', 1.0)
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7
-- !query
select date_add('2011-11-11', 1E1)
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7
-- !query
select date_add('2011-11-11', '1')
-- !query schema
struct<date_add(2011-11-11, 1):date>
-- !query output
2011-11-12
-- !query
select date_add('2011-11-11', '1.2')
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
The second argument of 'date_add' function needs to be an integer.
-- !query
select date_add(null, 1)
-- !query schema
struct<date_add(NULL, 1):date>
-- !query output
NULL
-- !query
select date_add(date'2011-11-11', null)
-- !query schema
struct<date_add(DATE '2011-11-11', NULL):date>
-- !query output
NULL
-- !query
select date_add(date'2011-11-11', 1)
-- !query schema
struct<date_add(DATE '2011-11-11', 1):date>
-- !query output
2011-11-12
-- !query
select date_add(timestamp_ltz'2011-11-11 12:12:12', 1), date_add(timestamp_ntz'2011-11-11 12:12:12', 1)
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_add(TIMESTAMP_NTZ '2011-11-11 12:12:12', 1)' due to data type mismatch: argument 1 requires date type, however, 'TIMESTAMP_NTZ '2011-11-11 12:12:12'' is of timestamp_ntz type.; line 1 pos 56
-- !query
select date_sub(date'2011-11-11', 1)
-- !query schema
struct<date_sub(DATE '2011-11-11', 1):date>
-- !query output
2011-11-10
-- !query
select date_sub(date'2011-11-11', '1')
-- !query schema
struct<date_sub(DATE '2011-11-11', 1):date>
-- !query output
2011-11-10
-- !query
select date_sub(date'2011-11-11', '1.2')
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
The second argument of 'date_sub' function needs to be an integer.
-- !query
select date_sub(timestamp_ltz'2011-11-11 12:12:12', 1), date_sub(timestamp_ntz'2011-11-11 12:12:12', 1)
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_sub(TIMESTAMP_NTZ '2011-11-11 12:12:12', 1)' due to data type mismatch: argument 1 requires date type, however, 'TIMESTAMP_NTZ '2011-11-11 12:12:12'' is of timestamp_ntz type.; line 1 pos 56
-- !query
select date_sub(null, 1)
-- !query schema
struct<date_sub(NULL, 1):date>
-- !query output
NULL
-- !query
select date_sub(date'2011-11-11', null)
-- !query schema
struct<date_sub(DATE '2011-11-11', NULL):date>
-- !query output
NULL
-- !query
create temp view v as select '1' str
-- !query schema
struct<>
-- !query output
-- !query
select date_add('2011-11-11', str) from v
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_add(CAST('2011-11-11' AS DATE), v.str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'v.str' is of string type.; line 1 pos 7
-- !query
select date_sub('2011-11-11', str) from v
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), v.str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'v.str' is of string type.; line 1 pos 7
-- !query
select date'2011-11-11' + 1E1
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7
-- !query
select date'2011-11-11' + '1'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DATE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DATE)' is of date type.; line 1 pos 7
-- !query
select null + date '2001-09-28'
-- !query schema
struct<date_add(DATE '2001-09-28', NULL):date>
-- !query output
NULL
-- !query
select date '2001-09-28' + 7Y
-- !query schema
struct<date_add(DATE '2001-09-28', 7):date>
-- !query output
2001-10-05
-- !query
select 7S + date '2001-09-28'
-- !query schema
struct<date_add(DATE '2001-09-28', 7):date>
-- !query output
2001-10-05
-- !query
select date '2001-10-01' - 7
-- !query schema
struct<date_sub(DATE '2001-10-01', 7):date>
-- !query output
2001-09-24
-- !query
select date '2001-10-01' - '7'
-- !query schema
struct<>
-- !query output
java.time.DateTimeException
Cannot cast 7 to DateType.
-- !query
select date '2001-09-28' + null
-- !query schema
struct<date_add(DATE '2001-09-28', NULL):date>
-- !query output
NULL
-- !query
select date '2001-09-28' - null
-- !query schema
struct<date_sub(DATE '2001-09-28', NULL):date>
-- !query output
NULL
-- !query
select '2011-11-11' - interval '2' day
-- !query schema
struct<2011-11-11 - INTERVAL '2' DAY:string>
-- !query output
2011-11-09 00:00:00
-- !query
select null - date '2019-10-06'
-- !query schema
struct<(NULL - DATE '2019-10-06'):interval day>
-- !query output
NULL
-- !query
select date '2001-10-01' - date '2001-09-28'
-- !query schema
struct<(DATE '2001-10-01' - DATE '2001-09-28'):interval day>
-- !query output
3 00:00:00.000000000
-- !query
select to_date('26/October/2015', 'dd/MMMMM/yyyy')
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select from_json('{"d":"26/October/2015"}', 'd Date', map('dateFormat', 'dd/MMMMM/yyyy'))
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select from_csv('26/October/2015', 'd Date', map('dateFormat', 'dd/MMMMM/yyyy'))
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html

View file

@ -0,0 +1,34 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 4
-- !query
select date'999999-03-18', date'-0001-1-28', date'0015'
-- !query schema
struct<DATE '+999999-03-18':date,DATE '-0001-01-28':date,DATE '0015-01-01':date>
-- !query output
+999999-03-18 -0001-01-28 0015-01-01
-- !query
select make_date(999999, 3, 18), make_date(-1, 1, 28)
-- !query schema
struct<make_date(999999, 3, 18):date,make_date(-1, 1, 28):date>
-- !query output
+999999-03-18 -0001-01-28
-- !query
select timestamp'-1969-12-31 16:00:00', timestamp'-0015-03-18 16:00:00', timestamp'-000001', timestamp'99999-03-18T12:03:17'
-- !query schema
struct<TIMESTAMP '-1969-12-31 16:00:00':timestamp,TIMESTAMP '-0015-03-18 16:00:00':timestamp,TIMESTAMP '-0001-01-01 00:00:00':timestamp,TIMESTAMP '+99999-03-18 12:03:17':timestamp>
-- !query output
-1969-12-31 16:00:00 -0015-03-18 16:00:00 -0001-01-01 00:00:00 +99999-03-18 12:03:17
-- !query
select make_timestamp(-1969, 12, 31, 16, 0, 0.0), make_timestamp(-15, 3, 18, 16, 0, 0.0), make_timestamp(99999, 3, 18, 12, 3, 17.0)
-- !query schema
struct<make_timestamp(-1969, 12, 31, 16, 0, 0.0):timestamp,make_timestamp(-15, 3, 18, 16, 0, 0.0):timestamp,make_timestamp(99999, 3, 18, 12, 3, 17.0):timestamp>
-- !query output
-1969-12-31 16:00:00 -0015-03-18 16:00:00 +99999-03-18 12:03:17

View file

@ -0,0 +1,676 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 77
-- !query
select timestamp '2019-01-01\t'
-- !query schema
struct<TIMESTAMP '2019-01-01 00:00:00':timestamp>
-- !query output
2019-01-01 00:00:00
-- !query
select timestamp '2019-01-01中文'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
Cannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)
== SQL ==
select timestamp '2019-01-01中文'
-------^^^
-- !query
select timestamp'4294967297'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
Cannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)
== SQL ==
select timestamp'4294967297'
-------^^^
-- !query
select timestamp'2021-01-01T12:30:4294967297.123456'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
Cannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)
== SQL ==
select timestamp'2021-01-01T12:30:4294967297.123456'
-------^^^
-- !query
select current_timestamp = current_timestamp
-- !query schema
struct<(current_timestamp() = current_timestamp()):boolean>
-- !query output
true
-- !query
select current_timestamp() = current_timestamp()
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
no viable alternative at input 'current_timestamp'(line 1, pos 7)
== SQL ==
select current_timestamp() = current_timestamp()
-------^^^
-- !query
select localtimestamp() = localtimestamp()
-- !query schema
struct<(localtimestamp() = localtimestamp()):boolean>
-- !query output
true
-- !query
SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678)
-- !query schema
struct<make_timestamp(2021, 7, 11, 6, 30, 45.678):timestamp>
-- !query output
2021-07-11 06:30:45.678
-- !query
SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678, 'CET')
-- !query schema
struct<make_timestamp(2021, 7, 11, 6, 30, 45.678, CET):timestamp>
-- !query output
2021-07-10 21:30:45.678
-- !query
SELECT make_timestamp(2021, 07, 11, 6, 30, 60.007)
-- !query schema
struct<>
-- !query output
java.time.DateTimeException
The fraction of sec must be zero. Valid range is [0, 60].
-- !query
select TIMESTAMP_SECONDS(1230219000),TIMESTAMP_SECONDS(-1230219000),TIMESTAMP_SECONDS(null)
-- !query schema
struct<timestamp_seconds(1230219000):timestamp,timestamp_seconds(-1230219000):timestamp,timestamp_seconds(NULL):timestamp>
-- !query output
2008-12-25 07:30:00 1931-01-07 00:30:00 NULL
-- !query
select TIMESTAMP_SECONDS(1.23), TIMESTAMP_SECONDS(1.23d), TIMESTAMP_SECONDS(FLOAT(1.23))
-- !query schema
struct<timestamp_seconds(1.23):timestamp,timestamp_seconds(1.23):timestamp,timestamp_seconds(1.23):timestamp>
-- !query output
1969-12-31 16:00:01.23 1969-12-31 16:00:01.23 1969-12-31 16:00:01.23
-- !query
select TIMESTAMP_MILLIS(1230219000123),TIMESTAMP_MILLIS(-1230219000123),TIMESTAMP_MILLIS(null)
-- !query schema
struct<timestamp_millis(1230219000123):timestamp,timestamp_millis(-1230219000123):timestamp,timestamp_millis(NULL):timestamp>
-- !query output
2008-12-25 07:30:00.123 1931-01-07 00:29:59.877 NULL
-- !query
select TIMESTAMP_MICROS(1230219000123123),TIMESTAMP_MICROS(-1230219000123123),TIMESTAMP_MICROS(null)
-- !query schema
struct<timestamp_micros(1230219000123123):timestamp,timestamp_micros(-1230219000123123):timestamp,timestamp_micros(NULL):timestamp>
-- !query output
2008-12-25 07:30:00.123123 1931-01-07 00:29:59.876877 NULL
-- !query
select TIMESTAMP_SECONDS(1230219000123123)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
long overflow
-- !query
select TIMESTAMP_SECONDS(-1230219000123123)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
long overflow
-- !query
select TIMESTAMP_MILLIS(92233720368547758)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
long overflow
-- !query
select TIMESTAMP_MILLIS(-92233720368547758)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
long overflow
-- !query
select TIMESTAMP_SECONDS(0.1234567)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
Rounding necessary
-- !query
select TIMESTAMP_SECONDS(0.1234567d), TIMESTAMP_SECONDS(FLOAT(0.1234567))
-- !query schema
struct<timestamp_seconds(0.1234567):timestamp,timestamp_seconds(0.1234567):timestamp>
-- !query output
1969-12-31 16:00:00.123456 1969-12-31 16:00:00.123456
-- !query
create temporary view ttf1 as select * from values
(1, 2),
(2, 3)
as ttf1(`current_date`, `current_timestamp`)
-- !query schema
struct<>
-- !query output
-- !query
select typeof(current_date), typeof(current_timestamp) from ttf1
-- !query schema
struct<typeof(current_date()):string,typeof(current_timestamp()):string>
-- !query output
date timestamp
date timestamp
-- !query
create temporary view ttf2 as select * from values
(1, 2),
(2, 3)
as ttf2(a, b)
-- !query schema
struct<>
-- !query output
-- !query
select current_date = current_date(), current_timestamp = current_timestamp(), a, b from ttf2
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
no viable alternative at input 'current_date'(line 1, pos 22)
== SQL ==
select current_date = current_date(), current_timestamp = current_timestamp(), a, b from ttf2
----------------------^^^
-- !query
select a, b from ttf2 order by a, current_date
-- !query schema
struct<a:int,b:int>
-- !query output
1 2
2 3
-- !query
select UNIX_SECONDS(timestamp'2020-12-01 14:30:08Z'), UNIX_SECONDS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_SECONDS(null)
-- !query schema
struct<unix_seconds(TIMESTAMP '2020-12-01 06:30:08'):bigint,unix_seconds(TIMESTAMP '2020-12-01 06:30:08.999999'):bigint,unix_seconds(NULL):bigint>
-- !query output
1606833008 1606833008 NULL
-- !query
select UNIX_MILLIS(timestamp'2020-12-01 14:30:08Z'), UNIX_MILLIS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_MILLIS(null)
-- !query schema
struct<unix_millis(TIMESTAMP '2020-12-01 06:30:08'):bigint,unix_millis(TIMESTAMP '2020-12-01 06:30:08.999999'):bigint,unix_millis(NULL):bigint>
-- !query output
1606833008000 1606833008999 NULL
-- !query
select UNIX_MICROS(timestamp'2020-12-01 14:30:08Z'), UNIX_MICROS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_MICROS(null)
-- !query schema
struct<unix_micros(TIMESTAMP '2020-12-01 06:30:08'):bigint,unix_micros(TIMESTAMP '2020-12-01 06:30:08.999999'):bigint,unix_micros(NULL):bigint>
-- !query output
1606833008000000 1606833008999999 NULL
-- !query
select to_timestamp(null), to_timestamp('2016-12-31 00:12:00'), to_timestamp('2016-12-31', 'yyyy-MM-dd')
-- !query schema
struct<to_timestamp(NULL):timestamp,to_timestamp(2016-12-31 00:12:00):timestamp,to_timestamp(2016-12-31, yyyy-MM-dd):timestamp>
-- !query output
NULL 2016-12-31 00:12:00 2016-12-31 00:00:00
-- !query
select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<>
-- !query output
java.time.format.DateTimeParseException
Text '2019-10-06 10:11:12.' could not be parsed at index 20
-- !query
select to_timestamp('2019-10-06 10:11:12.0', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.0, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp('2019-10-06 10:11:12.1', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.1, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
2019-10-06 10:11:12.1
-- !query
select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
2019-10-06 10:11:12.12
-- !query
select to_timestamp('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.123UTC, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
2019-10-06 03:11:12.123
-- !query
select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
2019-10-06 10:11:12.1234
-- !query
select to_timestamp('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.12345CST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
2019-10-06 08:11:12.12345
-- !query
select to_timestamp('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.123456PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
2019-10-06 10:11:12.123456
-- !query
select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<>
-- !query output
java.time.format.DateTimeParseException
Text '2019-10-06 10:11:12.1234567PST' could not be parsed, unparsed text found at index 26
-- !query
select to_timestamp('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(123456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
2019-10-06 10:11:12.123456
-- !query
select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<>
-- !query output
java.time.format.DateTimeParseException
Text '223456 2019-10-06 10:11:12.123456PST' could not be parsed at index 27
-- !query
select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.[SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.[SSSSSS]):timestamp>
-- !query output
2019-10-06 10:11:12.1234
-- !query
select to_timestamp('2019-10-06 10:11:12.123', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.123, yyyy-MM-dd HH:mm:ss[.SSSSSS]):timestamp>
-- !query output
2019-10-06 10:11:12.123
-- !query
select to_timestamp('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12, yyyy-MM-dd HH:mm:ss[.SSSSSS]):timestamp>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm[:ss.SSSSSS]):timestamp>
-- !query output
2019-10-06 10:11:12.12
-- !query
select to_timestamp('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11, yyyy-MM-dd HH:mm[:ss.SSSSSS]):timestamp>
-- !query output
2019-10-06 10:11:00
-- !query
select to_timestamp("2019-10-06S10:11:12.12345", "yyyy-MM-dd'S'HH:mm:ss.SSSSSS")
-- !query schema
struct<to_timestamp(2019-10-06S10:11:12.12345, yyyy-MM-dd'S'HH:mm:ss.SSSSSS):timestamp>
-- !query output
2019-10-06 10:11:12.12345
-- !query
select to_timestamp("12.12342019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
-- !query schema
struct<to_timestamp(12.12342019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm):timestamp>
-- !query output
2019-10-06 10:11:12.1234
-- !query
select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
-- !query schema
struct<>
-- !query output
java.time.format.DateTimeParseException
Text '12.1232019-10-06S10:11' could not be parsed at index 7
-- !query
select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm")
-- !query schema
struct<>
-- !query output
java.time.format.DateTimeParseException
Text '12.1232019-10-06S10:11' could not be parsed at index 9
-- !query
select to_timestamp("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm")
-- !query schema
struct<to_timestamp(12.1234019-10-06S10:11, ss.SSSSy-MM-dd'S'HH:mm):timestamp>
-- !query output
0019-10-06 10:11:12.1234
-- !query
select to_timestamp("2019-10-06S", "yyyy-MM-dd'S'")
-- !query schema
struct<to_timestamp(2019-10-06S, yyyy-MM-dd'S'):timestamp>
-- !query output
2019-10-06 00:00:00
-- !query
select to_timestamp("S2019-10-06", "'S'yyyy-MM-dd")
-- !query schema
struct<to_timestamp(S2019-10-06, 'S'yyyy-MM-dd):timestamp>
-- !query output
2019-10-06 00:00:00
-- !query
select to_timestamp("2019-10-06T10:11:12'12", "yyyy-MM-dd'T'HH:mm:ss''SSSS")
-- !query schema
struct<to_timestamp(2019-10-06T10:11:12'12, yyyy-MM-dd'T'HH:mm:ss''SSSS):timestamp>
-- !query output
2019-10-06 10:11:12.12
-- !query
select to_timestamp("2019-10-06T10:11:12'", "yyyy-MM-dd'T'HH:mm:ss''")
-- !query schema
struct<to_timestamp(2019-10-06T10:11:12', yyyy-MM-dd'T'HH:mm:ss''):timestamp>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp("'2019-10-06T10:11:12", "''yyyy-MM-dd'T'HH:mm:ss")
-- !query schema
struct<to_timestamp('2019-10-06T10:11:12, ''yyyy-MM-dd'T'HH:mm:ss):timestamp>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp("P2019-10-06T10:11:12", "'P'yyyy-MM-dd'T'HH:mm:ss")
-- !query schema
struct<to_timestamp(P2019-10-06T10:11:12, 'P'yyyy-MM-dd'T'HH:mm:ss):timestamp>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp("16", "dd")
-- !query schema
struct<to_timestamp(16, dd):timestamp>
-- !query output
1970-01-16 00:00:00
-- !query
select to_timestamp("02-29", "MM-dd")
-- !query schema
struct<>
-- !query output
java.time.DateTimeException
Invalid date 'February 29' as '1970' is not a leap year
-- !query
select to_timestamp("2019 40", "yyyy mm")
-- !query schema
struct<to_timestamp(2019 40, yyyy mm):timestamp>
-- !query output
2019-01-01 00:40:00
-- !query
select to_timestamp("2019 10:10:10", "yyyy hh:mm:ss")
-- !query schema
struct<to_timestamp(2019 10:10:10, yyyy hh:mm:ss):timestamp>
-- !query output
2019-01-01 10:10:10
-- !query
select timestamp'2011-11-11 11:11:11' + interval '2' day
-- !query schema
struct<TIMESTAMP '2011-11-11 11:11:11' + INTERVAL '2' DAY:timestamp>
-- !query output
2011-11-13 11:11:11
-- !query
select timestamp'2011-11-11 11:11:11' - interval '2' day
-- !query schema
struct<TIMESTAMP '2011-11-11 11:11:11' - INTERVAL '2' DAY:timestamp>
-- !query output
2011-11-09 11:11:11
-- !query
select timestamp'2011-11-11 11:11:11' + interval '2' second
-- !query schema
struct<TIMESTAMP '2011-11-11 11:11:11' + INTERVAL '02' SECOND:timestamp>
-- !query output
2011-11-11 11:11:13
-- !query
select timestamp'2011-11-11 11:11:11' - interval '2' second
-- !query schema
struct<TIMESTAMP '2011-11-11 11:11:11' - INTERVAL '02' SECOND:timestamp>
-- !query output
2011-11-11 11:11:09
-- !query
select '2011-11-11 11:11:11' - interval '2' second
-- !query schema
struct<2011-11-11 11:11:11 - INTERVAL '02' SECOND:string>
-- !query output
2011-11-11 11:11:09
-- !query
select '1' - interval '2' second
-- !query schema
struct<>
-- !query output
java.time.DateTimeException
Cannot cast 1 to TimestampType.
-- !query
select 1 - interval '2' second
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7
-- !query
select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
struct<(DATE '2020-01-01' - TIMESTAMP '2019-10-06 10:11:12.345678'):interval day to second>
-- !query output
86 13:48:47.654322000
-- !query
select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01'
-- !query schema
struct<(TIMESTAMP '2019-10-06 10:11:12.345678' - DATE '2020-01-01'):interval day to second>
-- !query output
-86 13:48:47.654322000
-- !query
select timestamp'2019-10-06 10:11:12.345678' - null
-- !query schema
struct<(TIMESTAMP '2019-10-06 10:11:12.345678' - NULL):interval day to second>
-- !query output
NULL
-- !query
select null - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
struct<(NULL - TIMESTAMP '2019-10-06 10:11:12.345678'):interval day to second>
-- !query output
NULL
-- !query
select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG')
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE')
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat', 'dd/MMMMM/yyyy'))
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select from_csv('26/October/2015', 't Timestamp', map('timestampFormat', 'dd/MMMMM/yyyy'))
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html

View file

@ -0,0 +1,541 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 63
-- !query
select date '2019-01-01\t'
-- !query schema
struct<DATE '2019-01-01':date>
-- !query output
2019-01-01
-- !query
select date '2020-01-01中文'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
Cannot parse the DATE value: 2020-01-01中文(line 1, pos 7)
== SQL ==
select date '2020-01-01中文'
-------^^^
-- !query
select make_date(2019, 1, 1), make_date(12, 12, 12)
-- !query schema
struct<make_date(2019, 1, 1):date,make_date(12, 12, 12):date>
-- !query output
2019-01-01 0012-12-12
-- !query
select make_date(2000, 13, 1)
-- !query schema
struct<make_date(2000, 13, 1):date>
-- !query output
NULL
-- !query
select make_date(2000, 1, 33)
-- !query schema
struct<make_date(2000, 1, 33):date>
-- !query output
NULL
-- !query
select date'015'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
Cannot parse the DATE value: 015(line 1, pos 7)
== SQL ==
select date'015'
-------^^^
-- !query
select date'2021-4294967297-11'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
Cannot parse the DATE value: 2021-4294967297-11(line 1, pos 7)
== SQL ==
select date'2021-4294967297-11'
-------^^^
-- !query
select current_date = current_date
-- !query schema
struct<(current_date() = current_date()):boolean>
-- !query output
true
-- !query
select current_date() = current_date()
-- !query schema
struct<(current_date() = current_date()):boolean>
-- !query output
true
-- !query
select DATE_FROM_UNIX_DATE(0), DATE_FROM_UNIX_DATE(1000), DATE_FROM_UNIX_DATE(null)
-- !query schema
struct<date_from_unix_date(0):date,date_from_unix_date(1000):date,date_from_unix_date(NULL):date>
-- !query output
1970-01-01 1972-09-27 NULL
-- !query
select UNIX_DATE(DATE('1970-01-01')), UNIX_DATE(DATE('2020-12-04')), UNIX_DATE(null)
-- !query schema
struct<unix_date(1970-01-01):int,unix_date(2020-12-04):int,unix_date(NULL):int>
-- !query output
0 18600 NULL
-- !query
select to_date(null), to_date('2016-12-31'), to_date('2016-12-31', 'yyyy-MM-dd')
-- !query schema
struct<to_date(NULL):date,to_date(2016-12-31):date,to_date(2016-12-31, yyyy-MM-dd):date>
-- !query output
NULL 2016-12-31 2016-12-31
-- !query
select to_date("16", "dd")
-- !query schema
struct<to_date(16, dd):date>
-- !query output
1970-01-16
-- !query
select to_date("02-29", "MM-dd")
-- !query schema
struct<to_date(02-29, MM-dd):date>
-- !query output
NULL
-- !query
select dayofweek('2007-02-03'), dayofweek('2009-07-30'), dayofweek('2017-05-27'), dayofweek(null),
dayofweek('1582-10-15 13:10:15'), dayofweek(timestamp_ltz'1582-10-15 13:10:15'), dayofweek(timestamp_ntz'1582-10-15 13:10:15')
-- !query schema
struct<dayofweek(2007-02-03):int,dayofweek(2009-07-30):int,dayofweek(2017-05-27):int,dayofweek(NULL):int,dayofweek(1582-10-15 13:10:15):int,dayofweek(TIMESTAMP '1582-10-15 13:10:15'):int,dayofweek(TIMESTAMP_NTZ '1582-10-15 13:10:15'):int>
-- !query output
7 5 7 NULL 6 6 6
-- !query
select weekday('2007-02-03'), weekday('2009-07-30'), weekday('2017-05-27'), weekday(null),
weekday('1582-10-15 13:10:15'), weekday(timestamp_ltz'1582-10-15 13:10:15'), weekday(timestamp_ntz'1582-10-15 13:10:15')
-- !query schema
struct<weekday(2007-02-03):int,weekday(2009-07-30):int,weekday(2017-05-27):int,weekday(NULL):int,weekday(1582-10-15 13:10:15):int,weekday(TIMESTAMP '1582-10-15 13:10:15'):int,weekday(TIMESTAMP_NTZ '1582-10-15 13:10:15'):int>
-- !query output
5 3 5 NULL 4 4 4
-- !query
select year('1500-01-01'), year('1582-10-15 13:10:15'), year(timestamp_ltz'1582-10-15 13:10:15'), year(timestamp_ntz'1582-10-15 13:10:15')
-- !query schema
struct<year(1500-01-01):int,year(1582-10-15 13:10:15):int,year(TIMESTAMP '1582-10-15 13:10:15'):int,year(TIMESTAMP_NTZ '1582-10-15 13:10:15'):int>
-- !query output
1500 1582 1582 1582
-- !query
select month('1500-01-01'), month('1582-10-15 13:10:15'), month(timestamp_ltz'1582-10-15 13:10:15'), month(timestamp_ntz'1582-10-15 13:10:15')
-- !query schema
struct<month(1500-01-01):int,month(1582-10-15 13:10:15):int,month(TIMESTAMP '1582-10-15 13:10:15'):int,month(TIMESTAMP_NTZ '1582-10-15 13:10:15'):int>
-- !query output
1 10 10 10
-- !query
select dayOfYear('1500-01-01'), dayOfYear('1582-10-15 13:10:15'), dayOfYear(timestamp_ltz'1582-10-15 13:10:15'), dayOfYear(timestamp_ntz'1582-10-15 13:10:15')
-- !query schema
struct<dayofyear(1500-01-01):int,dayofyear(1582-10-15 13:10:15):int,dayofyear(TIMESTAMP '1582-10-15 13:10:15'):int,dayofyear(TIMESTAMP_NTZ '1582-10-15 13:10:15'):int>
-- !query output
1 288 288 288
-- !query
select next_day("2015-07-23", "Mon")
-- !query schema
struct<next_day(2015-07-23, Mon):date>
-- !query output
2015-07-27
-- !query
select next_day("2015-07-23", "xx")
-- !query schema
struct<next_day(2015-07-23, xx):date>
-- !query output
NULL
-- !query
select next_day("2015-07-23 12:12:12", "Mon")
-- !query schema
struct<next_day(2015-07-23 12:12:12, Mon):date>
-- !query output
2015-07-27
-- !query
select next_day(timestamp_ltz"2015-07-23 12:12:12", "Mon")
-- !query schema
struct<next_day(TIMESTAMP '2015-07-23 12:12:12', Mon):date>
-- !query output
2015-07-27
-- !query
select next_day(timestamp_ntz"2015-07-23 12:12:12", "Mon")
-- !query schema
struct<next_day(TIMESTAMP_NTZ '2015-07-23 12:12:12', Mon):date>
-- !query output
2015-07-27
-- !query
select next_day("xx", "Mon")
-- !query schema
struct<next_day(xx, Mon):date>
-- !query output
NULL
-- !query
select next_day(null, "Mon")
-- !query schema
struct<next_day(NULL, Mon):date>
-- !query output
NULL
-- !query
select next_day(null, "xx")
-- !query schema
struct<next_day(NULL, xx):date>
-- !query output
NULL
-- !query
select date_add('2011-11-11', 1Y)
-- !query schema
struct<date_add(2011-11-11, 1):date>
-- !query output
2011-11-12
-- !query
select date_add('2011-11-11', 1S)
-- !query schema
struct<date_add(2011-11-11, 1):date>
-- !query output
2011-11-12
-- !query
select date_add('2011-11-11', 1)
-- !query schema
struct<date_add(2011-11-11, 1):date>
-- !query output
2011-11-12
-- !query
select date_add('2011-11-11', 1L)
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7
-- !query
select date_add('2011-11-11', 1.0)
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7
-- !query
select date_add('2011-11-11', 1E1)
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7
-- !query
select date_add('2011-11-11', '1')
-- !query schema
struct<date_add(2011-11-11, 1):date>
-- !query output
2011-11-12
-- !query
select date_add('2011-11-11', '1.2')
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
The second argument of 'date_add' function needs to be an integer.
-- !query
select date_add(null, 1)
-- !query schema
struct<date_add(NULL, 1):date>
-- !query output
NULL
-- !query
select date_add(date'2011-11-11', null)
-- !query schema
struct<date_add(DATE '2011-11-11', NULL):date>
-- !query output
NULL
-- !query
select date_add(date'2011-11-11', 1)
-- !query schema
struct<date_add(DATE '2011-11-11', 1):date>
-- !query output
2011-11-12
-- !query
select date_add(timestamp_ltz'2011-11-11 12:12:12', 1), date_add(timestamp_ntz'2011-11-11 12:12:12', 1)
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_add(TIMESTAMP_NTZ '2011-11-11 12:12:12', 1)' due to data type mismatch: argument 1 requires date type, however, 'TIMESTAMP_NTZ '2011-11-11 12:12:12'' is of timestamp_ntz type.; line 1 pos 56
-- !query
select date_sub(date'2011-11-11', 1)
-- !query schema
struct<date_sub(DATE '2011-11-11', 1):date>
-- !query output
2011-11-10
-- !query
select date_sub(date'2011-11-11', '1')
-- !query schema
struct<date_sub(DATE '2011-11-11', 1):date>
-- !query output
2011-11-10
-- !query
select date_sub(date'2011-11-11', '1.2')
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
The second argument of 'date_sub' function needs to be an integer.
-- !query
select date_sub(timestamp_ltz'2011-11-11 12:12:12', 1), date_sub(timestamp_ntz'2011-11-11 12:12:12', 1)
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_sub(TIMESTAMP_NTZ '2011-11-11 12:12:12', 1)' due to data type mismatch: argument 1 requires date type, however, 'TIMESTAMP_NTZ '2011-11-11 12:12:12'' is of timestamp_ntz type.; line 1 pos 56
-- !query
select date_sub(null, 1)
-- !query schema
struct<date_sub(NULL, 1):date>
-- !query output
NULL
-- !query
select date_sub(date'2011-11-11', null)
-- !query schema
struct<date_sub(DATE '2011-11-11', NULL):date>
-- !query output
NULL
-- !query
create temp view v as select '1' str
-- !query schema
struct<>
-- !query output
-- !query
select date_add('2011-11-11', str) from v
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_add(CAST('2011-11-11' AS DATE), v.str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'v.str' is of string type.; line 1 pos 7
-- !query
select date_sub('2011-11-11', str) from v
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), v.str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'v.str' is of string type.; line 1 pos 7
-- !query
select date'2011-11-11' + 1E1
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7
-- !query
select date'2011-11-11' + '1'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7
-- !query
select null + date '2001-09-28'
-- !query schema
struct<date_add(DATE '2001-09-28', NULL):date>
-- !query output
NULL
-- !query
select date '2001-09-28' + 7Y
-- !query schema
struct<date_add(DATE '2001-09-28', 7):date>
-- !query output
2001-10-05
-- !query
select 7S + date '2001-09-28'
-- !query schema
struct<date_add(DATE '2001-09-28', 7):date>
-- !query output
2001-10-05
-- !query
select date '2001-10-01' - 7
-- !query schema
struct<date_sub(DATE '2001-10-01', 7):date>
-- !query output
2001-09-24
-- !query
select date '2001-10-01' - '7'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve 'date_sub(DATE '2001-10-01', CAST('7' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('7' AS DOUBLE)' is of double type.; line 1 pos 7
-- !query
select date '2001-09-28' + null
-- !query schema
struct<date_add(DATE '2001-09-28', NULL):date>
-- !query output
NULL
-- !query
select date '2001-09-28' - null
-- !query schema
struct<date_sub(DATE '2001-09-28', NULL):date>
-- !query output
NULL
-- !query
select '2011-11-11' - interval '2' day
-- !query schema
struct<2011-11-11 - INTERVAL '2' DAY:string>
-- !query output
2011-11-09 00:00:00
-- !query
select null - date '2019-10-06'
-- !query schema
struct<(NULL - DATE '2019-10-06'):interval day>
-- !query output
NULL
-- !query
select date '2001-10-01' - date '2001-09-28'
-- !query schema
struct<(DATE '2001-10-01' - DATE '2001-09-28'):interval day>
-- !query output
3 00:00:00.000000000
-- !query
select to_date('26/October/2015', 'dd/MMMMM/yyyy')
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select from_json('{"d":"26/October/2015"}', 'd Date', map('dateFormat', 'dd/MMMMM/yyyy'))
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select from_csv('26/October/2015', 'd Date', map('dateFormat', 'dd/MMMMM/yyyy'))
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html

View file

@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 19
-- Number of queries: 29
-- !query
@ -161,3 +161,83 @@ struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to parse '2018-366' in the new parser. You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
-- !query
select to_date("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS")
-- !query schema
struct<to_date(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS):date>
-- !query output
NULL
-- !query
select to_date("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS")
-- !query schema
struct<to_date(Unparseable, yyyy-MM-dd HH:mm:ss.SSS):date>
-- !query output
NULL
-- !query
select to_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS")
-- !query schema
struct<to_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS):timestamp>
-- !query output
NULL
-- !query
select to_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS")
-- !query schema
struct<to_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS):timestamp>
-- !query output
NULL
-- !query
select unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS")
-- !query schema
struct<unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS):bigint>
-- !query output
NULL
-- !query
select unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS")
-- !query schema
struct<unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS):bigint>
-- !query output
NULL
-- !query
select to_unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS")
-- !query schema
struct<to_unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS):bigint>
-- !query output
NULL
-- !query
select to_unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS")
-- !query schema
struct<to_unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS):bigint>
-- !query output
NULL
-- !query
select cast("Unparseable" as timestamp)
-- !query schema
struct<CAST(Unparseable AS TIMESTAMP):timestamp>
-- !query output
NULL
-- !query
select cast("Unparseable" as date)
-- !query schema
struct<CAST(Unparseable AS DATE):date>
-- !query output
NULL

View file

@ -0,0 +1,34 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 4
-- !query
select date'999999-03-18', date'-0001-1-28', date'0015'
-- !query schema
struct<DATE '+999999-03-18':date,DATE '-0001-01-28':date,DATE '0015-01-01':date>
-- !query output
+999999-03-18 -0001-01-28 0015-01-01
-- !query
select make_date(999999, 3, 18), make_date(-1, 1, 28)
-- !query schema
struct<make_date(999999, 3, 18):date,make_date(-1, 1, 28):date>
-- !query output
+999999-03-18 -0001-01-28
-- !query
select timestamp'-1969-12-31 16:00:00', timestamp'-0015-03-18 16:00:00', timestamp'-000001', timestamp'99999-03-18T12:03:17'
-- !query schema
struct<TIMESTAMP '-1969-12-31 16:00:00':timestamp,TIMESTAMP '-0015-03-18 16:00:00':timestamp,TIMESTAMP '-0001-01-01 00:00:00':timestamp,TIMESTAMP '+99999-03-18 12:03:17':timestamp>
-- !query output
-1969-12-31 16:00:00 -0015-03-18 16:00:00 -0001-01-01 00:00:00 +99999-03-18 12:03:17
-- !query
select make_timestamp(-1969, 12, 31, 16, 0, 0.0), make_timestamp(-15, 3, 18, 16, 0, 0.0), make_timestamp(99999, 3, 18, 12, 3, 17.0)
-- !query schema
struct<make_timestamp(-1969, 12, 31, 16, 0, 0.0):timestamp,make_timestamp(-15, 3, 18, 16, 0, 0.0):timestamp,make_timestamp(99999, 3, 18, 12, 3, 17.0):timestamp>
-- !query output
-1969-12-31 16:00:00 -0015-03-18 16:00:00 +99999-03-18 12:03:17

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,58 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 7
-- !query
select timestamp_ltz'2016-12-31 00:12:00', timestamp_ltz'2016-12-31'
-- !query schema
struct<TIMESTAMP '2016-12-31 00:12:00':timestamp,TIMESTAMP '2016-12-31 00:00:00':timestamp>
-- !query output
2016-12-31 00:12:00 2016-12-31 00:00:00
-- !query
select to_timestamp_ltz(null), to_timestamp_ltz('2016-12-31 00:12:00'), to_timestamp_ltz('2016-12-31', 'yyyy-MM-dd')
-- !query schema
struct<to_timestamp_ltz(NULL):timestamp,to_timestamp_ltz(2016-12-31 00:12:00):timestamp,to_timestamp_ltz(2016-12-31, yyyy-MM-dd):timestamp>
-- !query output
NULL 2016-12-31 00:12:00 2016-12-31 00:00:00
-- !query
select to_timestamp_ltz(to_date(null)), to_timestamp_ltz(to_date('2016-12-31'))
-- !query schema
struct<to_timestamp_ltz(to_date(NULL)):timestamp,to_timestamp_ltz(to_date(2016-12-31)):timestamp>
-- !query output
NULL 2016-12-31 00:00:00
-- !query
select to_timestamp_ltz(to_timestamp_ntz(null)), to_timestamp_ltz(to_timestamp_ntz('2016-12-31 00:12:00'))
-- !query schema
struct<to_timestamp_ltz(to_timestamp_ntz(NULL)):timestamp,to_timestamp_ltz(to_timestamp_ntz(2016-12-31 00:12:00)):timestamp>
-- !query output
NULL 2016-12-31 00:12:00
-- !query
SELECT make_timestamp_ltz(2021, 07, 11, 6, 30, 45.678)
-- !query schema
struct<make_timestamp_ltz(2021, 7, 11, 6, 30, 45.678):timestamp>
-- !query output
2021-07-11 06:30:45.678
-- !query
SELECT make_timestamp_ltz(2021, 07, 11, 6, 30, 45.678, 'CET')
-- !query schema
struct<make_timestamp_ltz(2021, 7, 11, 6, 30, 45.678):timestamp>
-- !query output
2021-07-10 21:30:45.678
-- !query
SELECT make_timestamp_ltz(2021, 07, 11, 6, 30, 60.007)
-- !query schema
struct<make_timestamp_ltz(2021, 7, 11, 6, 30, 60.007):timestamp>
-- !query output
NULL

View file

@ -0,0 +1,59 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 7
-- !query
select timestamp_ntz'2016-12-31 00:12:00', timestamp_ntz'2016-12-31'
-- !query schema
struct<TIMESTAMP_NTZ '2016-12-31 00:12:00':timestamp_ntz,TIMESTAMP_NTZ '2016-12-31 00:00:00':timestamp_ntz>
-- !query output
2016-12-31 00:12:00 2016-12-31 00:00:00
-- !query
select to_timestamp_ntz(null), to_timestamp_ntz('2016-12-31 00:12:00'), to_timestamp_ntz('2016-12-31', 'yyyy-MM-dd')
-- !query schema
struct<to_timestamp_ntz(NULL):timestamp_ntz,to_timestamp_ntz(2016-12-31 00:12:00):timestamp_ntz,to_timestamp_ntz(2016-12-31, yyyy-MM-dd):timestamp_ntz>
-- !query output
NULL 2016-12-31 00:12:00 2016-12-31 00:00:00
-- !query
select to_timestamp_ntz(to_date(null)), to_timestamp_ntz(to_date('2016-12-31'))
-- !query schema
struct<to_timestamp_ntz(to_date(NULL)):timestamp_ntz,to_timestamp_ntz(to_date(2016-12-31)):timestamp_ntz>
-- !query output
NULL 2016-12-31 00:00:00
-- !query
select to_timestamp_ntz(to_timestamp_ltz(null)), to_timestamp_ntz(to_timestamp_ltz('2016-12-31 00:12:00'))
-- !query schema
struct<to_timestamp_ntz(to_timestamp_ltz(NULL)):timestamp_ntz,to_timestamp_ntz(to_timestamp_ltz(2016-12-31 00:12:00)):timestamp_ntz>
-- !query output
NULL 2016-12-31 00:12:00
-- !query
SELECT make_timestamp_ntz(2021, 07, 11, 6, 30, 45.678)
-- !query schema
struct<make_timestamp_ntz(2021, 7, 11, 6, 30, 45.678):timestamp_ntz>
-- !query output
2021-07-11 06:30:45.678
-- !query
SELECT make_timestamp_ntz(2021, 07, 11, 6, 30, 45.678, 'CET')
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
Invalid number of arguments for function make_timestamp_ntz. Expected: 6; Found: 7; line 1 pos 7
-- !query
SELECT make_timestamp_ntz(2021, 07, 11, 6, 30, 60.007)
-- !query schema
struct<make_timestamp_ntz(2021, 7, 11, 6, 30, 60.007):timestamp_ntz>
-- !query output
NULL

View file

@ -0,0 +1,657 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 77
-- !query
select timestamp '2019-01-01\t'
-- !query schema
struct<TIMESTAMP '2019-01-01 00:00:00':timestamp>
-- !query output
2019-01-01 00:00:00
-- !query
select timestamp '2019-01-01中文'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
Cannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)
== SQL ==
select timestamp '2019-01-01中文'
-------^^^
-- !query
select timestamp'4294967297'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
Cannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)
== SQL ==
select timestamp'4294967297'
-------^^^
-- !query
select timestamp'2021-01-01T12:30:4294967297.123456'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
Cannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)
== SQL ==
select timestamp'2021-01-01T12:30:4294967297.123456'
-------^^^
-- !query
select current_timestamp = current_timestamp
-- !query schema
struct<(current_timestamp() = current_timestamp()):boolean>
-- !query output
true
-- !query
select current_timestamp() = current_timestamp()
-- !query schema
struct<(current_timestamp() = current_timestamp()):boolean>
-- !query output
true
-- !query
select localtimestamp() = localtimestamp()
-- !query schema
struct<(localtimestamp() = localtimestamp()):boolean>
-- !query output
true
-- !query
SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678)
-- !query schema
struct<make_timestamp(2021, 7, 11, 6, 30, 45.678):timestamp>
-- !query output
2021-07-11 06:30:45.678
-- !query
SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678, 'CET')
-- !query schema
struct<make_timestamp(2021, 7, 11, 6, 30, 45.678, CET):timestamp>
-- !query output
2021-07-10 21:30:45.678
-- !query
SELECT make_timestamp(2021, 07, 11, 6, 30, 60.007)
-- !query schema
struct<make_timestamp(2021, 7, 11, 6, 30, 60.007):timestamp>
-- !query output
NULL
-- !query
select TIMESTAMP_SECONDS(1230219000),TIMESTAMP_SECONDS(-1230219000),TIMESTAMP_SECONDS(null)
-- !query schema
struct<timestamp_seconds(1230219000):timestamp,timestamp_seconds(-1230219000):timestamp,timestamp_seconds(NULL):timestamp>
-- !query output
2008-12-25 07:30:00 1931-01-07 00:30:00 NULL
-- !query
select TIMESTAMP_SECONDS(1.23), TIMESTAMP_SECONDS(1.23d), TIMESTAMP_SECONDS(FLOAT(1.23))
-- !query schema
struct<timestamp_seconds(1.23):timestamp,timestamp_seconds(1.23):timestamp,timestamp_seconds(1.23):timestamp>
-- !query output
1969-12-31 16:00:01.23 1969-12-31 16:00:01.23 1969-12-31 16:00:01.23
-- !query
select TIMESTAMP_MILLIS(1230219000123),TIMESTAMP_MILLIS(-1230219000123),TIMESTAMP_MILLIS(null)
-- !query schema
struct<timestamp_millis(1230219000123):timestamp,timestamp_millis(-1230219000123):timestamp,timestamp_millis(NULL):timestamp>
-- !query output
2008-12-25 07:30:00.123 1931-01-07 00:29:59.877 NULL
-- !query
select TIMESTAMP_MICROS(1230219000123123),TIMESTAMP_MICROS(-1230219000123123),TIMESTAMP_MICROS(null)
-- !query schema
struct<timestamp_micros(1230219000123123):timestamp,timestamp_micros(-1230219000123123):timestamp,timestamp_micros(NULL):timestamp>
-- !query output
2008-12-25 07:30:00.123123 1931-01-07 00:29:59.876877 NULL
-- !query
select TIMESTAMP_SECONDS(1230219000123123)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
long overflow
-- !query
select TIMESTAMP_SECONDS(-1230219000123123)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
long overflow
-- !query
select TIMESTAMP_MILLIS(92233720368547758)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
long overflow
-- !query
select TIMESTAMP_MILLIS(-92233720368547758)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
long overflow
-- !query
select TIMESTAMP_SECONDS(0.1234567)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
Rounding necessary
-- !query
select TIMESTAMP_SECONDS(0.1234567d), TIMESTAMP_SECONDS(FLOAT(0.1234567))
-- !query schema
struct<timestamp_seconds(0.1234567):timestamp,timestamp_seconds(0.1234567):timestamp>
-- !query output
1969-12-31 16:00:00.123456 1969-12-31 16:00:00.123456
-- !query
create temporary view ttf1 as select * from values
(1, 2),
(2, 3)
as ttf1(`current_date`, `current_timestamp`)
-- !query schema
struct<>
-- !query output
-- !query
select typeof(current_date), typeof(current_timestamp) from ttf1
-- !query schema
struct<typeof(current_date):string,typeof(current_timestamp):string>
-- !query output
int int
int int
-- !query
create temporary view ttf2 as select * from values
(1, 2),
(2, 3)
as ttf2(a, b)
-- !query schema
struct<>
-- !query output
-- !query
select current_date = current_date(), current_timestamp = current_timestamp(), a, b from ttf2
-- !query schema
struct<(current_date() = current_date()):boolean,(current_timestamp() = current_timestamp()):boolean,a:int,b:int>
-- !query output
true true 1 2
true true 2 3
-- !query
select a, b from ttf2 order by a, current_date
-- !query schema
struct<a:int,b:int>
-- !query output
1 2
2 3
-- !query
select UNIX_SECONDS(timestamp'2020-12-01 14:30:08Z'), UNIX_SECONDS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_SECONDS(null)
-- !query schema
struct<unix_seconds(TIMESTAMP '2020-12-01 06:30:08'):bigint,unix_seconds(TIMESTAMP '2020-12-01 06:30:08.999999'):bigint,unix_seconds(NULL):bigint>
-- !query output
1606833008 1606833008 NULL
-- !query
select UNIX_MILLIS(timestamp'2020-12-01 14:30:08Z'), UNIX_MILLIS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_MILLIS(null)
-- !query schema
struct<unix_millis(TIMESTAMP '2020-12-01 06:30:08'):bigint,unix_millis(TIMESTAMP '2020-12-01 06:30:08.999999'):bigint,unix_millis(NULL):bigint>
-- !query output
1606833008000 1606833008999 NULL
-- !query
select UNIX_MICROS(timestamp'2020-12-01 14:30:08Z'), UNIX_MICROS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_MICROS(null)
-- !query schema
struct<unix_micros(TIMESTAMP '2020-12-01 06:30:08'):bigint,unix_micros(TIMESTAMP '2020-12-01 06:30:08.999999'):bigint,unix_micros(NULL):bigint>
-- !query output
1606833008000000 1606833008999999 NULL
-- !query
select to_timestamp(null), to_timestamp('2016-12-31 00:12:00'), to_timestamp('2016-12-31', 'yyyy-MM-dd')
-- !query schema
struct<to_timestamp(NULL):timestamp,to_timestamp(2016-12-31 00:12:00):timestamp,to_timestamp(2016-12-31, yyyy-MM-dd):timestamp>
-- !query output
NULL 2016-12-31 00:12:00 2016-12-31 00:00:00
-- !query
select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12., yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
NULL
-- !query
select to_timestamp('2019-10-06 10:11:12.0', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.0, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp('2019-10-06 10:11:12.1', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.1, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
2019-10-06 10:11:12.1
-- !query
select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
2019-10-06 10:11:12.12
-- !query
select to_timestamp('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.123UTC, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
2019-10-06 03:11:12.123
-- !query
select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
2019-10-06 10:11:12.1234
-- !query
select to_timestamp('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.12345CST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
2019-10-06 08:11:12.12345
-- !query
select to_timestamp('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.123456PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
2019-10-06 10:11:12.123456
-- !query
select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.1234567PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
NULL
-- !query
select to_timestamp('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(123456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
2019-10-06 10:11:12.123456
-- !query
select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(223456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
-- !query output
NULL
-- !query
select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.[SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.[SSSSSS]):timestamp>
-- !query output
2019-10-06 10:11:12.1234
-- !query
select to_timestamp('2019-10-06 10:11:12.123', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.123, yyyy-MM-dd HH:mm:ss[.SSSSSS]):timestamp>
-- !query output
2019-10-06 10:11:12.123
-- !query
select to_timestamp('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12, yyyy-MM-dd HH:mm:ss[.SSSSSS]):timestamp>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm[:ss.SSSSSS]):timestamp>
-- !query output
2019-10-06 10:11:12.12
-- !query
select to_timestamp('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11, yyyy-MM-dd HH:mm[:ss.SSSSSS]):timestamp>
-- !query output
2019-10-06 10:11:00
-- !query
select to_timestamp("2019-10-06S10:11:12.12345", "yyyy-MM-dd'S'HH:mm:ss.SSSSSS")
-- !query schema
struct<to_timestamp(2019-10-06S10:11:12.12345, yyyy-MM-dd'S'HH:mm:ss.SSSSSS):timestamp>
-- !query output
2019-10-06 10:11:12.12345
-- !query
select to_timestamp("12.12342019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
-- !query schema
struct<to_timestamp(12.12342019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm):timestamp>
-- !query output
2019-10-06 10:11:12.1234
-- !query
select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
-- !query schema
struct<to_timestamp(12.1232019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm):timestamp>
-- !query output
NULL
-- !query
select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm")
-- !query schema
struct<to_timestamp(12.1232019-10-06S10:11, ss.SSSSyy-MM-dd'S'HH:mm):timestamp>
-- !query output
NULL
-- !query
select to_timestamp("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm")
-- !query schema
struct<to_timestamp(12.1234019-10-06S10:11, ss.SSSSy-MM-dd'S'HH:mm):timestamp>
-- !query output
0019-10-06 10:11:12.1234
-- !query
select to_timestamp("2019-10-06S", "yyyy-MM-dd'S'")
-- !query schema
struct<to_timestamp(2019-10-06S, yyyy-MM-dd'S'):timestamp>
-- !query output
2019-10-06 00:00:00
-- !query
select to_timestamp("S2019-10-06", "'S'yyyy-MM-dd")
-- !query schema
struct<to_timestamp(S2019-10-06, 'S'yyyy-MM-dd):timestamp>
-- !query output
2019-10-06 00:00:00
-- !query
select to_timestamp("2019-10-06T10:11:12'12", "yyyy-MM-dd'T'HH:mm:ss''SSSS")
-- !query schema
struct<to_timestamp(2019-10-06T10:11:12'12, yyyy-MM-dd'T'HH:mm:ss''SSSS):timestamp>
-- !query output
2019-10-06 10:11:12.12
-- !query
select to_timestamp("2019-10-06T10:11:12'", "yyyy-MM-dd'T'HH:mm:ss''")
-- !query schema
struct<to_timestamp(2019-10-06T10:11:12', yyyy-MM-dd'T'HH:mm:ss''):timestamp>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp("'2019-10-06T10:11:12", "''yyyy-MM-dd'T'HH:mm:ss")
-- !query schema
struct<to_timestamp('2019-10-06T10:11:12, ''yyyy-MM-dd'T'HH:mm:ss):timestamp>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp("P2019-10-06T10:11:12", "'P'yyyy-MM-dd'T'HH:mm:ss")
-- !query schema
struct<to_timestamp(P2019-10-06T10:11:12, 'P'yyyy-MM-dd'T'HH:mm:ss):timestamp>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp("16", "dd")
-- !query schema
struct<to_timestamp(16, dd):timestamp>
-- !query output
1970-01-16 00:00:00
-- !query
select to_timestamp("02-29", "MM-dd")
-- !query schema
struct<to_timestamp(02-29, MM-dd):timestamp>
-- !query output
NULL
-- !query
select to_timestamp("2019 40", "yyyy mm")
-- !query schema
struct<to_timestamp(2019 40, yyyy mm):timestamp>
-- !query output
2019-01-01 00:40:00
-- !query
select to_timestamp("2019 10:10:10", "yyyy hh:mm:ss")
-- !query schema
struct<to_timestamp(2019 10:10:10, yyyy hh:mm:ss):timestamp>
-- !query output
2019-01-01 10:10:10
-- !query
select timestamp'2011-11-11 11:11:11' + interval '2' day
-- !query schema
struct<TIMESTAMP '2011-11-11 11:11:11' + INTERVAL '2' DAY:timestamp>
-- !query output
2011-11-13 11:11:11
-- !query
select timestamp'2011-11-11 11:11:11' - interval '2' day
-- !query schema
struct<TIMESTAMP '2011-11-11 11:11:11' - INTERVAL '2' DAY:timestamp>
-- !query output
2011-11-09 11:11:11
-- !query
select timestamp'2011-11-11 11:11:11' + interval '2' second
-- !query schema
struct<TIMESTAMP '2011-11-11 11:11:11' + INTERVAL '02' SECOND:timestamp>
-- !query output
2011-11-11 11:11:13
-- !query
select timestamp'2011-11-11 11:11:11' - interval '2' second
-- !query schema
struct<TIMESTAMP '2011-11-11 11:11:11' - INTERVAL '02' SECOND:timestamp>
-- !query output
2011-11-11 11:11:09
-- !query
select '2011-11-11 11:11:11' - interval '2' second
-- !query schema
struct<2011-11-11 11:11:11 - INTERVAL '02' SECOND:string>
-- !query output
2011-11-11 11:11:09
-- !query
select '1' - interval '2' second
-- !query schema
struct<1 - INTERVAL '02' SECOND:string>
-- !query output
NULL
-- !query
select 1 - interval '2' second
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7
-- !query
select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
struct<(DATE '2020-01-01' - TIMESTAMP '2019-10-06 10:11:12.345678'):interval day to second>
-- !query output
86 13:48:47.654322000
-- !query
select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01'
-- !query schema
struct<(TIMESTAMP '2019-10-06 10:11:12.345678' - DATE '2020-01-01'):interval day to second>
-- !query output
-86 13:48:47.654322000
-- !query
select timestamp'2019-10-06 10:11:12.345678' - null
-- !query schema
struct<(TIMESTAMP '2019-10-06 10:11:12.345678' - NULL):interval day to second>
-- !query output
NULL
-- !query
select null - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
struct<(NULL - TIMESTAMP '2019-10-06 10:11:12.345678'):interval day to second>
-- !query output
NULL
-- !query
select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG')
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE')
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat', 'dd/MMMMM/yyyy'))
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select from_csv('26/October/2015', 't Timestamp', map('timestampFormat', 'dd/MMMMM/yyyy'))
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html

View file

@ -0,0 +1,34 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 4
-- !query
select date'999999-03-18', date'-0001-1-28', date'0015'
-- !query schema
struct<DATE '+999999-03-18':date,DATE '-0001-01-28':date,DATE '0015-01-01':date>
-- !query output
+999999-03-18 -0001-01-28 0015-01-01
-- !query
select make_date(999999, 3, 18), make_date(-1, 1, 28)
-- !query schema
struct<make_date(999999, 3, 18):date,make_date(-1, 1, 28):date>
-- !query output
+999999-03-18 -0001-01-28
-- !query
select timestamp'-1969-12-31 16:00:00', timestamp'-0015-03-18 16:00:00', timestamp'-000001', timestamp'99999-03-18T12:03:17'
-- !query schema
struct<TIMESTAMP_NTZ '-1969-12-31 16:00:00':timestamp_ntz,TIMESTAMP_NTZ '-0015-03-18 16:00:00':timestamp_ntz,TIMESTAMP_NTZ '-0001-01-01 00:00:00':timestamp_ntz,TIMESTAMP_NTZ '+99999-03-18 12:03:17':timestamp_ntz>
-- !query output
-1969-12-31 16:00:00 -0015-03-18 16:00:00 -0001-01-01 00:00:00 +99999-03-18 12:03:17
-- !query
select make_timestamp(-1969, 12, 31, 16, 0, 0.0), make_timestamp(-15, 3, 18, 16, 0, 0.0), make_timestamp(99999, 3, 18, 12, 3, 17.0)
-- !query schema
struct<make_timestamp(-1969, 12, 31, 16, 0, 0.0):timestamp_ntz,make_timestamp(-15, 3, 18, 16, 0, 0.0):timestamp_ntz,make_timestamp(99999, 3, 18, 12, 3, 17.0):timestamp_ntz>
-- !query output
-1969-12-31 16:00:00 -0015-03-18 16:00:00 +99999-03-18 12:03:17

View file

@ -0,0 +1,675 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 77
-- !query
select timestamp '2019-01-01\t'
-- !query schema
struct<TIMESTAMP_NTZ '2019-01-01 00:00:00':timestamp_ntz>
-- !query output
2019-01-01 00:00:00
-- !query
select timestamp '2019-01-01中文'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
Cannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)
== SQL ==
select timestamp '2019-01-01中文'
-------^^^
-- !query
select timestamp'4294967297'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
Cannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)
== SQL ==
select timestamp'4294967297'
-------^^^
-- !query
select timestamp'2021-01-01T12:30:4294967297.123456'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
Cannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)
== SQL ==
select timestamp'2021-01-01T12:30:4294967297.123456'
-------^^^
-- !query
select current_timestamp = current_timestamp
-- !query schema
struct<(current_timestamp() = current_timestamp()):boolean>
-- !query output
true
-- !query
select current_timestamp() = current_timestamp()
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
no viable alternative at input 'current_timestamp'(line 1, pos 7)
== SQL ==
select current_timestamp() = current_timestamp()
-------^^^
-- !query
select localtimestamp() = localtimestamp()
-- !query schema
struct<(localtimestamp() = localtimestamp()):boolean>
-- !query output
true
-- !query
SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678)
-- !query schema
struct<make_timestamp(2021, 7, 11, 6, 30, 45.678):timestamp_ntz>
-- !query output
2021-07-11 06:30:45.678
-- !query
SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678, 'CET')
-- !query schema
struct<make_timestamp(2021, 7, 11, 6, 30, 45.678, CET):timestamp_ntz>
-- !query output
2021-07-11 06:30:45.678
-- !query
SELECT make_timestamp(2021, 07, 11, 6, 30, 60.007)
-- !query schema
struct<>
-- !query output
java.time.DateTimeException
The fraction of sec must be zero. Valid range is [0, 60].
-- !query
select TIMESTAMP_SECONDS(1230219000),TIMESTAMP_SECONDS(-1230219000),TIMESTAMP_SECONDS(null)
-- !query schema
struct<timestamp_seconds(1230219000):timestamp,timestamp_seconds(-1230219000):timestamp,timestamp_seconds(NULL):timestamp>
-- !query output
2008-12-25 07:30:00 1931-01-07 00:30:00 NULL
-- !query
select TIMESTAMP_SECONDS(1.23), TIMESTAMP_SECONDS(1.23d), TIMESTAMP_SECONDS(FLOAT(1.23))
-- !query schema
struct<timestamp_seconds(1.23):timestamp,timestamp_seconds(1.23):timestamp,timestamp_seconds(1.23):timestamp>
-- !query output
1969-12-31 16:00:01.23 1969-12-31 16:00:01.23 1969-12-31 16:00:01.23
-- !query
select TIMESTAMP_MILLIS(1230219000123),TIMESTAMP_MILLIS(-1230219000123),TIMESTAMP_MILLIS(null)
-- !query schema
struct<timestamp_millis(1230219000123):timestamp,timestamp_millis(-1230219000123):timestamp,timestamp_millis(NULL):timestamp>
-- !query output
2008-12-25 07:30:00.123 1931-01-07 00:29:59.877 NULL
-- !query
select TIMESTAMP_MICROS(1230219000123123),TIMESTAMP_MICROS(-1230219000123123),TIMESTAMP_MICROS(null)
-- !query schema
struct<timestamp_micros(1230219000123123):timestamp,timestamp_micros(-1230219000123123):timestamp,timestamp_micros(NULL):timestamp>
-- !query output
2008-12-25 07:30:00.123123 1931-01-07 00:29:59.876877 NULL
-- !query
select TIMESTAMP_SECONDS(1230219000123123)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
long overflow
-- !query
select TIMESTAMP_SECONDS(-1230219000123123)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
long overflow
-- !query
select TIMESTAMP_MILLIS(92233720368547758)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
long overflow
-- !query
select TIMESTAMP_MILLIS(-92233720368547758)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
long overflow
-- !query
select TIMESTAMP_SECONDS(0.1234567)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
Rounding necessary
-- !query
select TIMESTAMP_SECONDS(0.1234567d), TIMESTAMP_SECONDS(FLOAT(0.1234567))
-- !query schema
struct<timestamp_seconds(0.1234567):timestamp,timestamp_seconds(0.1234567):timestamp>
-- !query output
1969-12-31 16:00:00.123456 1969-12-31 16:00:00.123456
-- !query
create temporary view ttf1 as select * from values
(1, 2),
(2, 3)
as ttf1(`current_date`, `current_timestamp`)
-- !query schema
struct<>
-- !query output
-- !query
select typeof(current_date), typeof(current_timestamp) from ttf1
-- !query schema
struct<typeof(current_date()):string,typeof(current_timestamp()):string>
-- !query output
date timestamp
date timestamp
-- !query
create temporary view ttf2 as select * from values
(1, 2),
(2, 3)
as ttf2(a, b)
-- !query schema
struct<>
-- !query output
-- !query
select current_date = current_date(), current_timestamp = current_timestamp(), a, b from ttf2
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
no viable alternative at input 'current_date'(line 1, pos 22)
== SQL ==
select current_date = current_date(), current_timestamp = current_timestamp(), a, b from ttf2
----------------------^^^
-- !query
select a, b from ttf2 order by a, current_date
-- !query schema
struct<a:int,b:int>
-- !query output
1 2
2 3
-- !query
select UNIX_SECONDS(timestamp'2020-12-01 14:30:08Z'), UNIX_SECONDS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_SECONDS(null)
-- !query schema
struct<unix_seconds(TIMESTAMP '2020-12-01 06:30:08'):bigint,unix_seconds(TIMESTAMP '2020-12-01 06:30:08.999999'):bigint,unix_seconds(NULL):bigint>
-- !query output
1606833008 1606833008 NULL
-- !query
select UNIX_MILLIS(timestamp'2020-12-01 14:30:08Z'), UNIX_MILLIS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_MILLIS(null)
-- !query schema
struct<unix_millis(TIMESTAMP '2020-12-01 06:30:08'):bigint,unix_millis(TIMESTAMP '2020-12-01 06:30:08.999999'):bigint,unix_millis(NULL):bigint>
-- !query output
1606833008000 1606833008999 NULL
-- !query
select UNIX_MICROS(timestamp'2020-12-01 14:30:08Z'), UNIX_MICROS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_MICROS(null)
-- !query schema
struct<unix_micros(TIMESTAMP '2020-12-01 06:30:08'):bigint,unix_micros(TIMESTAMP '2020-12-01 06:30:08.999999'):bigint,unix_micros(NULL):bigint>
-- !query output
1606833008000000 1606833008999999 NULL
-- !query
select to_timestamp(null), to_timestamp('2016-12-31 00:12:00'), to_timestamp('2016-12-31', 'yyyy-MM-dd')
-- !query schema
struct<to_timestamp(NULL):timestamp_ntz,to_timestamp(2016-12-31 00:12:00):timestamp_ntz,to_timestamp(2016-12-31, yyyy-MM-dd):timestamp_ntz>
-- !query output
NULL 2016-12-31 00:12:00 2016-12-31 00:00:00
-- !query
select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<>
-- !query output
java.time.format.DateTimeParseException
Text '2019-10-06 10:11:12.' could not be parsed at index 20
-- !query
select to_timestamp('2019-10-06 10:11:12.0', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.0, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp('2019-10-06 10:11:12.1', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.1, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.1
-- !query
select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.12
-- !query
select to_timestamp('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.123UTC, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.123
-- !query
select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.1234
-- !query
select to_timestamp('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.12345CST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.12345
-- !query
select to_timestamp('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.123456PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.123456
-- !query
select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<>
-- !query output
java.time.format.DateTimeParseException
Text '2019-10-06 10:11:12.1234567PST' could not be parsed, unparsed text found at index 26
-- !query
select to_timestamp('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(123456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.123456
-- !query
select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<>
-- !query output
java.time.format.DateTimeParseException
Text '223456 2019-10-06 10:11:12.123456PST' could not be parsed at index 27
-- !query
select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.[SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.[SSSSSS]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.1234
-- !query
select to_timestamp('2019-10-06 10:11:12.123', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.123, yyyy-MM-dd HH:mm:ss[.SSSSSS]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.123
-- !query
select to_timestamp('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12, yyyy-MM-dd HH:mm:ss[.SSSSSS]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm[:ss.SSSSSS]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.12
-- !query
select to_timestamp('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11, yyyy-MM-dd HH:mm[:ss.SSSSSS]):timestamp_ntz>
-- !query output
2019-10-06 10:11:00
-- !query
select to_timestamp("2019-10-06S10:11:12.12345", "yyyy-MM-dd'S'HH:mm:ss.SSSSSS")
-- !query schema
struct<to_timestamp(2019-10-06S10:11:12.12345, yyyy-MM-dd'S'HH:mm:ss.SSSSSS):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.12345
-- !query
select to_timestamp("12.12342019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
-- !query schema
struct<to_timestamp(12.12342019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.1234
-- !query
select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
-- !query schema
struct<>
-- !query output
java.time.format.DateTimeParseException
Text '12.1232019-10-06S10:11' could not be parsed at index 7
-- !query
select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm")
-- !query schema
struct<>
-- !query output
java.time.format.DateTimeParseException
Text '12.1232019-10-06S10:11' could not be parsed at index 9
-- !query
select to_timestamp("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm")
-- !query schema
struct<to_timestamp(12.1234019-10-06S10:11, ss.SSSSy-MM-dd'S'HH:mm):timestamp_ntz>
-- !query output
0019-10-06 10:11:12.1234
-- !query
select to_timestamp("2019-10-06S", "yyyy-MM-dd'S'")
-- !query schema
struct<to_timestamp(2019-10-06S, yyyy-MM-dd'S'):timestamp_ntz>
-- !query output
2019-10-06 00:00:00
-- !query
select to_timestamp("S2019-10-06", "'S'yyyy-MM-dd")
-- !query schema
struct<to_timestamp(S2019-10-06, 'S'yyyy-MM-dd):timestamp_ntz>
-- !query output
2019-10-06 00:00:00
-- !query
select to_timestamp("2019-10-06T10:11:12'12", "yyyy-MM-dd'T'HH:mm:ss''SSSS")
-- !query schema
struct<to_timestamp(2019-10-06T10:11:12'12, yyyy-MM-dd'T'HH:mm:ss''SSSS):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.12
-- !query
select to_timestamp("2019-10-06T10:11:12'", "yyyy-MM-dd'T'HH:mm:ss''")
-- !query schema
struct<to_timestamp(2019-10-06T10:11:12', yyyy-MM-dd'T'HH:mm:ss''):timestamp_ntz>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp("'2019-10-06T10:11:12", "''yyyy-MM-dd'T'HH:mm:ss")
-- !query schema
struct<to_timestamp('2019-10-06T10:11:12, ''yyyy-MM-dd'T'HH:mm:ss):timestamp_ntz>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp("P2019-10-06T10:11:12", "'P'yyyy-MM-dd'T'HH:mm:ss")
-- !query schema
struct<to_timestamp(P2019-10-06T10:11:12, 'P'yyyy-MM-dd'T'HH:mm:ss):timestamp_ntz>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp("16", "dd")
-- !query schema
struct<to_timestamp(16, dd):timestamp_ntz>
-- !query output
1970-01-16 00:00:00
-- !query
select to_timestamp("02-29", "MM-dd")
-- !query schema
struct<>
-- !query output
java.time.DateTimeException
Invalid date 'February 29' as '1970' is not a leap year
-- !query
select to_timestamp("2019 40", "yyyy mm")
-- !query schema
struct<to_timestamp(2019 40, yyyy mm):timestamp_ntz>
-- !query output
2019-01-01 00:40:00
-- !query
select to_timestamp("2019 10:10:10", "yyyy hh:mm:ss")
-- !query schema
struct<to_timestamp(2019 10:10:10, yyyy hh:mm:ss):timestamp_ntz>
-- !query output
2019-01-01 10:10:10
-- !query
select timestamp'2011-11-11 11:11:11' + interval '2' day
-- !query schema
struct<TIMESTAMP_NTZ '2011-11-11 11:11:11' + INTERVAL '2' DAY:timestamp_ntz>
-- !query output
2011-11-13 11:11:11
-- !query
select timestamp'2011-11-11 11:11:11' - interval '2' day
-- !query schema
struct<TIMESTAMP_NTZ '2011-11-11 11:11:11' - INTERVAL '2' DAY:timestamp_ntz>
-- !query output
2011-11-09 11:11:11
-- !query
select timestamp'2011-11-11 11:11:11' + interval '2' second
-- !query schema
struct<TIMESTAMP_NTZ '2011-11-11 11:11:11' + INTERVAL '02' SECOND:timestamp_ntz>
-- !query output
2011-11-11 11:11:13
-- !query
select timestamp'2011-11-11 11:11:11' - interval '2' second
-- !query schema
struct<TIMESTAMP_NTZ '2011-11-11 11:11:11' - INTERVAL '02' SECOND:timestamp_ntz>
-- !query output
2011-11-11 11:11:09
-- !query
select '2011-11-11 11:11:11' - interval '2' second
-- !query schema
struct<2011-11-11 11:11:11 - INTERVAL '02' SECOND:string>
-- !query output
2011-11-11 11:11:09
-- !query
select '1' - interval '2' second
-- !query schema
struct<>
-- !query output
java.time.DateTimeException
Cannot cast 1 to TimestampType.
-- !query
select 1 - interval '2' second
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7
-- !query
select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
struct<(DATE '2020-01-01' - TIMESTAMP_NTZ '2019-10-06 10:11:12.345678'):interval day to second>
-- !query output
86 13:48:47.654322000
-- !query
select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01'
-- !query schema
struct<(TIMESTAMP_NTZ '2019-10-06 10:11:12.345678' - DATE '2020-01-01'):interval day to second>
-- !query output
-86 13:48:47.654322000
-- !query
select timestamp'2019-10-06 10:11:12.345678' - null
-- !query schema
struct<(TIMESTAMP_NTZ '2019-10-06 10:11:12.345678' - NULL):interval day to second>
-- !query output
NULL
-- !query
select null - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
struct<(NULL - TIMESTAMP_NTZ '2019-10-06 10:11:12.345678'):interval day to second>
-- !query output
NULL
-- !query
select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG')
-- !query schema
struct<>
-- !query output
java.lang.RuntimeException
Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE')
-- !query schema
struct<>
-- !query output
java.lang.RuntimeException
Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
-- !query schema
struct<>
-- !query output
java.lang.RuntimeException
Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat', 'dd/MMMMM/yyyy'))
-- !query schema
struct<from_json({"t":"26/October/2015"}):struct<t:timestamp_ntz>>
-- !query output
{"t":null}
-- !query
select from_csv('26/October/2015', 't Timestamp', map('timestampFormat', 'dd/MMMMM/yyyy'))
-- !query schema
struct<>
-- !query output
java.lang.Exception
Unsupported type: timestamp_ntz

View file

@ -0,0 +1,656 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 77
-- !query
select timestamp '2019-01-01\t'
-- !query schema
struct<TIMESTAMP_NTZ '2019-01-01 00:00:00':timestamp_ntz>
-- !query output
2019-01-01 00:00:00
-- !query
select timestamp '2019-01-01中文'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
Cannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)
== SQL ==
select timestamp '2019-01-01中文'
-------^^^
-- !query
select timestamp'4294967297'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
Cannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)
== SQL ==
select timestamp'4294967297'
-------^^^
-- !query
select timestamp'2021-01-01T12:30:4294967297.123456'
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
Cannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)
== SQL ==
select timestamp'2021-01-01T12:30:4294967297.123456'
-------^^^
-- !query
select current_timestamp = current_timestamp
-- !query schema
struct<(current_timestamp() = current_timestamp()):boolean>
-- !query output
true
-- !query
select current_timestamp() = current_timestamp()
-- !query schema
struct<(current_timestamp() = current_timestamp()):boolean>
-- !query output
true
-- !query
select localtimestamp() = localtimestamp()
-- !query schema
struct<(localtimestamp() = localtimestamp()):boolean>
-- !query output
true
-- !query
SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678)
-- !query schema
struct<make_timestamp(2021, 7, 11, 6, 30, 45.678):timestamp_ntz>
-- !query output
2021-07-11 06:30:45.678
-- !query
SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678, 'CET')
-- !query schema
struct<make_timestamp(2021, 7, 11, 6, 30, 45.678, CET):timestamp_ntz>
-- !query output
2021-07-11 06:30:45.678
-- !query
SELECT make_timestamp(2021, 07, 11, 6, 30, 60.007)
-- !query schema
struct<make_timestamp(2021, 7, 11, 6, 30, 60.007):timestamp_ntz>
-- !query output
NULL
-- !query
select TIMESTAMP_SECONDS(1230219000),TIMESTAMP_SECONDS(-1230219000),TIMESTAMP_SECONDS(null)
-- !query schema
struct<timestamp_seconds(1230219000):timestamp,timestamp_seconds(-1230219000):timestamp,timestamp_seconds(NULL):timestamp>
-- !query output
2008-12-25 07:30:00 1931-01-07 00:30:00 NULL
-- !query
select TIMESTAMP_SECONDS(1.23), TIMESTAMP_SECONDS(1.23d), TIMESTAMP_SECONDS(FLOAT(1.23))
-- !query schema
struct<timestamp_seconds(1.23):timestamp,timestamp_seconds(1.23):timestamp,timestamp_seconds(1.23):timestamp>
-- !query output
1969-12-31 16:00:01.23 1969-12-31 16:00:01.23 1969-12-31 16:00:01.23
-- !query
select TIMESTAMP_MILLIS(1230219000123),TIMESTAMP_MILLIS(-1230219000123),TIMESTAMP_MILLIS(null)
-- !query schema
struct<timestamp_millis(1230219000123):timestamp,timestamp_millis(-1230219000123):timestamp,timestamp_millis(NULL):timestamp>
-- !query output
2008-12-25 07:30:00.123 1931-01-07 00:29:59.877 NULL
-- !query
select TIMESTAMP_MICROS(1230219000123123),TIMESTAMP_MICROS(-1230219000123123),TIMESTAMP_MICROS(null)
-- !query schema
struct<timestamp_micros(1230219000123123):timestamp,timestamp_micros(-1230219000123123):timestamp,timestamp_micros(NULL):timestamp>
-- !query output
2008-12-25 07:30:00.123123 1931-01-07 00:29:59.876877 NULL
-- !query
select TIMESTAMP_SECONDS(1230219000123123)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
long overflow
-- !query
select TIMESTAMP_SECONDS(-1230219000123123)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
long overflow
-- !query
select TIMESTAMP_MILLIS(92233720368547758)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
long overflow
-- !query
select TIMESTAMP_MILLIS(-92233720368547758)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
long overflow
-- !query
select TIMESTAMP_SECONDS(0.1234567)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
Rounding necessary
-- !query
select TIMESTAMP_SECONDS(0.1234567d), TIMESTAMP_SECONDS(FLOAT(0.1234567))
-- !query schema
struct<timestamp_seconds(0.1234567):timestamp,timestamp_seconds(0.1234567):timestamp>
-- !query output
1969-12-31 16:00:00.123456 1969-12-31 16:00:00.123456
-- !query
create temporary view ttf1 as select * from values
(1, 2),
(2, 3)
as ttf1(`current_date`, `current_timestamp`)
-- !query schema
struct<>
-- !query output
-- !query
select typeof(current_date), typeof(current_timestamp) from ttf1
-- !query schema
struct<typeof(current_date):string,typeof(current_timestamp):string>
-- !query output
int int
int int
-- !query
create temporary view ttf2 as select * from values
(1, 2),
(2, 3)
as ttf2(a, b)
-- !query schema
struct<>
-- !query output
-- !query
select current_date = current_date(), current_timestamp = current_timestamp(), a, b from ttf2
-- !query schema
struct<(current_date() = current_date()):boolean,(current_timestamp() = current_timestamp()):boolean,a:int,b:int>
-- !query output
true true 1 2
true true 2 3
-- !query
select a, b from ttf2 order by a, current_date
-- !query schema
struct<a:int,b:int>
-- !query output
1 2
2 3
-- !query
select UNIX_SECONDS(timestamp'2020-12-01 14:30:08Z'), UNIX_SECONDS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_SECONDS(null)
-- !query schema
struct<unix_seconds(TIMESTAMP '2020-12-01 06:30:08'):bigint,unix_seconds(TIMESTAMP '2020-12-01 06:30:08.999999'):bigint,unix_seconds(NULL):bigint>
-- !query output
1606833008 1606833008 NULL
-- !query
select UNIX_MILLIS(timestamp'2020-12-01 14:30:08Z'), UNIX_MILLIS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_MILLIS(null)
-- !query schema
struct<unix_millis(TIMESTAMP '2020-12-01 06:30:08'):bigint,unix_millis(TIMESTAMP '2020-12-01 06:30:08.999999'):bigint,unix_millis(NULL):bigint>
-- !query output
1606833008000 1606833008999 NULL
-- !query
select UNIX_MICROS(timestamp'2020-12-01 14:30:08Z'), UNIX_MICROS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_MICROS(null)
-- !query schema
struct<unix_micros(TIMESTAMP '2020-12-01 06:30:08'):bigint,unix_micros(TIMESTAMP '2020-12-01 06:30:08.999999'):bigint,unix_micros(NULL):bigint>
-- !query output
1606833008000000 1606833008999999 NULL
-- !query
select to_timestamp(null), to_timestamp('2016-12-31 00:12:00'), to_timestamp('2016-12-31', 'yyyy-MM-dd')
-- !query schema
struct<to_timestamp(NULL):timestamp_ntz,to_timestamp(2016-12-31 00:12:00):timestamp_ntz,to_timestamp(2016-12-31, yyyy-MM-dd):timestamp_ntz>
-- !query output
NULL 2016-12-31 00:12:00 2016-12-31 00:00:00
-- !query
select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12., yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
NULL
-- !query
select to_timestamp('2019-10-06 10:11:12.0', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.0, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp('2019-10-06 10:11:12.1', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.1, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.1
-- !query
select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.12
-- !query
select to_timestamp('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.123UTC, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.123
-- !query
select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.1234
-- !query
select to_timestamp('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.12345CST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.12345
-- !query
select to_timestamp('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.123456PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.123456
-- !query
select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.1234567PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
NULL
-- !query
select to_timestamp('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(123456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.123456
-- !query
select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
-- !query schema
struct<to_timestamp(223456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
-- !query output
NULL
-- !query
select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.[SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.[SSSSSS]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.1234
-- !query
select to_timestamp('2019-10-06 10:11:12.123', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.123, yyyy-MM-dd HH:mm:ss[.SSSSSS]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.123
-- !query
select to_timestamp('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12, yyyy-MM-dd HH:mm:ss[.SSSSSS]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm[:ss.SSSSSS]):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.12
-- !query
select to_timestamp('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
-- !query schema
struct<to_timestamp(2019-10-06 10:11, yyyy-MM-dd HH:mm[:ss.SSSSSS]):timestamp_ntz>
-- !query output
2019-10-06 10:11:00
-- !query
select to_timestamp("2019-10-06S10:11:12.12345", "yyyy-MM-dd'S'HH:mm:ss.SSSSSS")
-- !query schema
struct<to_timestamp(2019-10-06S10:11:12.12345, yyyy-MM-dd'S'HH:mm:ss.SSSSSS):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.12345
-- !query
select to_timestamp("12.12342019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
-- !query schema
struct<to_timestamp(12.12342019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.1234
-- !query
select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
-- !query schema
struct<to_timestamp(12.1232019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm):timestamp_ntz>
-- !query output
NULL
-- !query
select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm")
-- !query schema
struct<to_timestamp(12.1232019-10-06S10:11, ss.SSSSyy-MM-dd'S'HH:mm):timestamp_ntz>
-- !query output
NULL
-- !query
select to_timestamp("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm")
-- !query schema
struct<to_timestamp(12.1234019-10-06S10:11, ss.SSSSy-MM-dd'S'HH:mm):timestamp_ntz>
-- !query output
0019-10-06 10:11:12.1234
-- !query
select to_timestamp("2019-10-06S", "yyyy-MM-dd'S'")
-- !query schema
struct<to_timestamp(2019-10-06S, yyyy-MM-dd'S'):timestamp_ntz>
-- !query output
2019-10-06 00:00:00
-- !query
select to_timestamp("S2019-10-06", "'S'yyyy-MM-dd")
-- !query schema
struct<to_timestamp(S2019-10-06, 'S'yyyy-MM-dd):timestamp_ntz>
-- !query output
2019-10-06 00:00:00
-- !query
select to_timestamp("2019-10-06T10:11:12'12", "yyyy-MM-dd'T'HH:mm:ss''SSSS")
-- !query schema
struct<to_timestamp(2019-10-06T10:11:12'12, yyyy-MM-dd'T'HH:mm:ss''SSSS):timestamp_ntz>
-- !query output
2019-10-06 10:11:12.12
-- !query
select to_timestamp("2019-10-06T10:11:12'", "yyyy-MM-dd'T'HH:mm:ss''")
-- !query schema
struct<to_timestamp(2019-10-06T10:11:12', yyyy-MM-dd'T'HH:mm:ss''):timestamp_ntz>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp("'2019-10-06T10:11:12", "''yyyy-MM-dd'T'HH:mm:ss")
-- !query schema
struct<to_timestamp('2019-10-06T10:11:12, ''yyyy-MM-dd'T'HH:mm:ss):timestamp_ntz>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp("P2019-10-06T10:11:12", "'P'yyyy-MM-dd'T'HH:mm:ss")
-- !query schema
struct<to_timestamp(P2019-10-06T10:11:12, 'P'yyyy-MM-dd'T'HH:mm:ss):timestamp_ntz>
-- !query output
2019-10-06 10:11:12
-- !query
select to_timestamp("16", "dd")
-- !query schema
struct<to_timestamp(16, dd):timestamp_ntz>
-- !query output
1970-01-16 00:00:00
-- !query
select to_timestamp("02-29", "MM-dd")
-- !query schema
struct<to_timestamp(02-29, MM-dd):timestamp_ntz>
-- !query output
NULL
-- !query
select to_timestamp("2019 40", "yyyy mm")
-- !query schema
struct<to_timestamp(2019 40, yyyy mm):timestamp_ntz>
-- !query output
2019-01-01 00:40:00
-- !query
select to_timestamp("2019 10:10:10", "yyyy hh:mm:ss")
-- !query schema
struct<to_timestamp(2019 10:10:10, yyyy hh:mm:ss):timestamp_ntz>
-- !query output
2019-01-01 10:10:10
-- !query
select timestamp'2011-11-11 11:11:11' + interval '2' day
-- !query schema
struct<TIMESTAMP_NTZ '2011-11-11 11:11:11' + INTERVAL '2' DAY:timestamp_ntz>
-- !query output
2011-11-13 11:11:11
-- !query
select timestamp'2011-11-11 11:11:11' - interval '2' day
-- !query schema
struct<TIMESTAMP_NTZ '2011-11-11 11:11:11' - INTERVAL '2' DAY:timestamp_ntz>
-- !query output
2011-11-09 11:11:11
-- !query
select timestamp'2011-11-11 11:11:11' + interval '2' second
-- !query schema
struct<TIMESTAMP_NTZ '2011-11-11 11:11:11' + INTERVAL '02' SECOND:timestamp_ntz>
-- !query output
2011-11-11 11:11:13
-- !query
select timestamp'2011-11-11 11:11:11' - interval '2' second
-- !query schema
struct<TIMESTAMP_NTZ '2011-11-11 11:11:11' - INTERVAL '02' SECOND:timestamp_ntz>
-- !query output
2011-11-11 11:11:09
-- !query
select '2011-11-11 11:11:11' - interval '2' second
-- !query schema
struct<2011-11-11 11:11:11 - INTERVAL '02' SECOND:string>
-- !query output
2011-11-11 11:11:09
-- !query
select '1' - interval '2' second
-- !query schema
struct<1 - INTERVAL '02' SECOND:string>
-- !query output
NULL
-- !query
select 1 - interval '2' second
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7
-- !query
select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
struct<(DATE '2020-01-01' - TIMESTAMP_NTZ '2019-10-06 10:11:12.345678'):interval day to second>
-- !query output
86 13:48:47.654322000
-- !query
select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01'
-- !query schema
struct<(TIMESTAMP_NTZ '2019-10-06 10:11:12.345678' - DATE '2020-01-01'):interval day to second>
-- !query output
-86 13:48:47.654322000
-- !query
select timestamp'2019-10-06 10:11:12.345678' - null
-- !query schema
struct<(TIMESTAMP_NTZ '2019-10-06 10:11:12.345678' - NULL):interval day to second>
-- !query output
NULL
-- !query
select null - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
struct<(NULL - TIMESTAMP_NTZ '2019-10-06 10:11:12.345678'):interval day to second>
-- !query output
NULL
-- !query
select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG')
-- !query schema
struct<>
-- !query output
java.lang.RuntimeException
Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE')
-- !query schema
struct<>
-- !query output
java.lang.RuntimeException
Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
-- !query schema
struct<>
-- !query output
java.lang.RuntimeException
Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
-- !query schema
struct<>
-- !query output
org.apache.spark.SparkUpgradeException
You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
-- !query
select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat', 'dd/MMMMM/yyyy'))
-- !query schema
struct<from_json({"t":"26/October/2015"}):struct<t:timestamp_ntz>>
-- !query output
{"t":null}
-- !query
select from_csv('26/October/2015', 't Timestamp', map('timestampFormat', 'dd/MMMMM/yyyy'))
-- !query schema
struct<>
-- !query output
java.lang.Exception
Unsupported type: timestamp_ntz

View file

@ -82,10 +82,10 @@ class ThriftServerQueryTestSuite extends SQLQueryTestSuite with SharedThriftServ
"postgreSQL/boolean.sql",
"postgreSQL/case.sql",
// SPARK-28624
"date.sql",
"datetime.sql",
"datetime-legacy.sql",
"ansi/datetime.sql",
"postgreSQL/date.sql",
"datetime-special.sql",
"ansi/datetime-special.sql",
"timestampNTZ/datetime-special.sql",
// SPARK-28620
"postgreSQL/float4.sql",
// SPARK-28636