[SPARK-28885][SQL][FOLLOW-UP] Re-enable the ported PgSQL regression tests of SQLQueryTestSuite

### What changes were proposed in this pull request?

SPARK-28885 (#26107) added support for the ANSI store assignment rules and stopped running some of the ported PgSQL regression tests because they violate those rules. To re-enable these tests, this PR modifies them so that they pass under the rules.
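
For context, a minimal sketch of what the ANSI store assignment policy rejects and how the ported tests were adjusted. The table `t`, the literal values, and the explicit `SET` of `spark.sql.storeAssignmentPolicy` are illustrative only and are not part of this patch:

```sql
-- Illustrative only: assumes the ANSI policy is enabled via
-- spark.sql.storeAssignmentPolicy; the table and values below are made up.
SET spark.sql.storeAssignmentPolicy=ANSI;

CREATE TABLE t (i int, d date) USING parquet;

-- Rejected at analysis time under the ANSI policy: a string literal cannot be
-- stored into an int or date column without an explicit cast.
-- INSERT INTO t VALUES ('123', '2019-11-20');

-- Accepted: explicit casts, the same pattern applied throughout the tests below.
INSERT INTO t VALUES (int('123'), date('2019-11-20'));
```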

### Why are the changes needed?

To improve test coverage.

### Does this PR introduce any user-facing change?

No.

### How was this patch tested?

Existing tests.

Closes #26492 from maropu/SPARK-28885-FOLLOWUP.

Authored-by: Takeshi Yamamuro <yamamuro@apache.org>
Signed-off-by: Dongjoon Hyun <dhyun@apple.com>
Authored by Takeshi Yamamuro on 2019-11-20 08:32:13 -08:00; committed by Dongjoon Hyun
parent b5df40bd87
commit 6eeb131941
17 changed files with 1150 additions and 1079 deletions

View file

@ -7,23 +7,25 @@
CREATE TABLE DATE_TBL (f1 date) USING parquet;
INSERT INTO DATE_TBL VALUES ('1957-04-09');
INSERT INTO DATE_TBL VALUES ('1957-06-13');
INSERT INTO DATE_TBL VALUES ('1996-02-28');
INSERT INTO DATE_TBL VALUES ('1996-02-29');
INSERT INTO DATE_TBL VALUES ('1996-03-01');
INSERT INTO DATE_TBL VALUES ('1996-03-02');
INSERT INTO DATE_TBL VALUES ('1997-02-28');
-- PostgreSQL implicitly casts string literals to data with date types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO DATE_TBL VALUES (date('1957-04-09'));
INSERT INTO DATE_TBL VALUES (date('1957-06-13'));
INSERT INTO DATE_TBL VALUES (date('1996-02-28'));
INSERT INTO DATE_TBL VALUES (date('1996-02-29'));
INSERT INTO DATE_TBL VALUES (date('1996-03-01'));
INSERT INTO DATE_TBL VALUES (date('1996-03-02'));
INSERT INTO DATE_TBL VALUES (date('1997-02-28'));
-- [SPARK-27923] Skip invalid date: 1997-02-29
-- INSERT INTO DATE_TBL VALUES ('1997-02-29');
INSERT INTO DATE_TBL VALUES ('1997-03-01');
INSERT INTO DATE_TBL VALUES ('1997-03-02');
INSERT INTO DATE_TBL VALUES ('2000-04-01');
INSERT INTO DATE_TBL VALUES ('2000-04-02');
INSERT INTO DATE_TBL VALUES ('2000-04-03');
INSERT INTO DATE_TBL VALUES ('2038-04-08');
INSERT INTO DATE_TBL VALUES ('2039-04-09');
INSERT INTO DATE_TBL VALUES ('2040-04-10');
-- INSERT INTO DATE_TBL VALUES (date('1997-02-29'));
INSERT INTO DATE_TBL VALUES (date('1997-03-01'));
INSERT INTO DATE_TBL VALUES (date('1997-03-02'));
INSERT INTO DATE_TBL VALUES (date('2000-04-01'));
INSERT INTO DATE_TBL VALUES (date('2000-04-02'));
INSERT INTO DATE_TBL VALUES (date('2000-04-03'));
INSERT INTO DATE_TBL VALUES (date('2038-04-08'));
INSERT INTO DATE_TBL VALUES (date('2039-04-09'));
INSERT INTO DATE_TBL VALUES (date('2040-04-10'));
SELECT f1 AS `Fifteen` FROM DATE_TBL;

View file

@ -7,11 +7,13 @@
CREATE TABLE FLOAT4_TBL (f1 float) USING parquet;
INSERT INTO FLOAT4_TBL VALUES (' 0.0');
INSERT INTO FLOAT4_TBL VALUES ('1004.30 ');
INSERT INTO FLOAT4_TBL VALUES (' -34.84 ');
INSERT INTO FLOAT4_TBL VALUES ('1.2345678901234e+20');
INSERT INTO FLOAT4_TBL VALUES ('1.2345678901234e-20');
-- PostgreSQL implicitly casts string literals to data with floating point types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO FLOAT4_TBL VALUES (float(' 0.0'));
INSERT INTO FLOAT4_TBL VALUES (float('1004.30 '));
INSERT INTO FLOAT4_TBL VALUES (float(' -34.84 '));
INSERT INTO FLOAT4_TBL VALUES (float('1.2345678901234e+20'));
INSERT INTO FLOAT4_TBL VALUES (float('1.2345678901234e-20'));
-- [SPARK-28024] Incorrect numeric values when out of range
-- test for over and under flow

View file

@ -7,11 +7,13 @@
CREATE TABLE FLOAT8_TBL(f1 double) USING parquet;
INSERT INTO FLOAT8_TBL VALUES (' 0.0 ');
INSERT INTO FLOAT8_TBL VALUES ('1004.30 ');
INSERT INTO FLOAT8_TBL VALUES (' -34.84');
INSERT INTO FLOAT8_TBL VALUES ('1.2345678901234e+200');
INSERT INTO FLOAT8_TBL VALUES ('1.2345678901234e-200');
-- PostgreSQL implicitly casts string literals to data with floating point types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO FLOAT8_TBL VALUES (double(' 0.0 '));
INSERT INTO FLOAT8_TBL VALUES (double('1004.30 '));
INSERT INTO FLOAT8_TBL VALUES (double(' -34.84'));
INSERT INTO FLOAT8_TBL VALUES (double('1.2345678901234e+200'));
INSERT INTO FLOAT8_TBL VALUES (double('1.2345678901234e-200'));
-- [SPARK-28024] Incorrect numeric values when out of range
-- test for underflow and overflow handling
@ -227,15 +229,17 @@ SELECT atanh(double('NaN'));
TRUNCATE TABLE FLOAT8_TBL;
INSERT INTO FLOAT8_TBL VALUES ('0.0');
-- PostgreSQL implicitly casts string literals to data with floating point types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO FLOAT8_TBL VALUES (double('0.0'));
INSERT INTO FLOAT8_TBL VALUES ('-34.84');
INSERT INTO FLOAT8_TBL VALUES (double('-34.84'));
INSERT INTO FLOAT8_TBL VALUES ('-1004.30');
INSERT INTO FLOAT8_TBL VALUES (double('-1004.30'));
INSERT INTO FLOAT8_TBL VALUES ('-1.2345678901234e+200');
INSERT INTO FLOAT8_TBL VALUES (double('-1.2345678901234e+200'));
INSERT INTO FLOAT8_TBL VALUES ('-1.2345678901234e-200');
INSERT INTO FLOAT8_TBL VALUES (double('-1.2345678901234e-200'));
SELECT '' AS five, * FROM FLOAT8_TBL;

View file

@ -8,19 +8,23 @@
CREATE TABLE INT2_TBL(f1 smallint) USING parquet;
-- [SPARK-28023] Trim the string when cast string type to other types
INSERT INTO INT2_TBL VALUES (trim('0 '));
-- PostgreSQL implicitly casts string literals to data with integral types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO INT2_TBL VALUES (smallint(trim('0 ')));
INSERT INTO INT2_TBL VALUES (trim(' 1234 '));
INSERT INTO INT2_TBL VALUES (smallint(trim(' 1234 ')));
INSERT INTO INT2_TBL VALUES (trim(' -1234'));
INSERT INTO INT2_TBL VALUES (smallint(trim(' -1234')));
-- [SPARK-27923] Invalid input syntax for type short throws exception at PostgreSQL
-- INSERT INTO INT2_TBL VALUES ('34.5');
-- largest and smallest values
INSERT INTO INT2_TBL VALUES ('32767');
-- PostgreSQL implicitly casts string literals to data with integral types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO INT2_TBL VALUES (smallint('32767'));
INSERT INTO INT2_TBL VALUES ('-32767');
INSERT INTO INT2_TBL VALUES (smallint('-32767'));
-- bad input values -- should give errors
-- INSERT INTO INT2_TBL VALUES ('100000');

View file

@ -9,19 +9,23 @@
CREATE TABLE INT4_TBL(f1 int) USING parquet;
-- [SPARK-28023] Trim the string when cast string type to other types
INSERT INTO INT4_TBL VALUES (trim(' 0 '));
-- PostgreSQL implicitly casts string literals to data with integral types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO INT4_TBL VALUES (int(trim(' 0 ')));
INSERT INTO INT4_TBL VALUES (trim('123456 '));
INSERT INTO INT4_TBL VALUES (int(trim('123456 ')));
INSERT INTO INT4_TBL VALUES (trim(' -123456'));
INSERT INTO INT4_TBL VALUES (int(trim(' -123456')));
-- [SPARK-27923] Invalid input syntax for integer: "34.5" at PostgreSQL
-- INSERT INTO INT4_TBL(f1) VALUES ('34.5');
-- largest and smallest values
INSERT INTO INT4_TBL VALUES ('2147483647');
-- PostgreSQL implicitly casts string literals to data with integral types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO INT4_TBL VALUES (int('2147483647'));
INSERT INTO INT4_TBL VALUES ('-2147483647');
INSERT INTO INT4_TBL VALUES (int('-2147483647'));
-- [SPARK-27923] Spark SQL insert these bad inputs to NULL
-- bad input values

View file

@ -8,11 +8,13 @@
--
CREATE TABLE INT8_TBL(q1 bigint, q2 bigint) USING parquet;
INSERT INTO INT8_TBL VALUES(trim(' 123 '),trim(' 456'));
INSERT INTO INT8_TBL VALUES(trim('123 '),'4567890123456789');
INSERT INTO INT8_TBL VALUES('4567890123456789','123');
INSERT INTO INT8_TBL VALUES(+4567890123456789,'4567890123456789');
INSERT INTO INT8_TBL VALUES('+4567890123456789','-4567890123456789');
-- PostgreSQL implicitly casts string literals to data with integral types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO INT8_TBL VALUES(bigint(trim(' 123 ')),bigint(trim(' 456')));
INSERT INTO INT8_TBL VALUES(bigint(trim('123 ')),bigint('4567890123456789'));
INSERT INTO INT8_TBL VALUES(bigint('4567890123456789'),bigint('123'));
INSERT INTO INT8_TBL VALUES(+4567890123456789,bigint('4567890123456789'));
INSERT INTO INT8_TBL VALUES(bigint('+4567890123456789'),bigint('-4567890123456789'));
-- [SPARK-27923] Spark SQL insert there bad inputs to NULL
-- bad inputs

View file

@ -16,19 +16,23 @@ CREATE TABLE TIMESTAMP_TBL (d1 timestamp) USING parquet;
-- block is entered exactly at local midnight; then 'now' and 'today' have
-- the same values and the counts will come out different.
INSERT INTO TIMESTAMP_TBL VALUES ('now');
-- PostgreSQL implicitly casts string literals to data with timestamp types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('now'));
-- SELECT pg_sleep(0.1);
-- BEGIN;
INSERT INTO TIMESTAMP_TBL VALUES ('now');
INSERT INTO TIMESTAMP_TBL VALUES ('today');
INSERT INTO TIMESTAMP_TBL VALUES ('yesterday');
INSERT INTO TIMESTAMP_TBL VALUES ('tomorrow');
-- PostgreSQL implicitly casts string literals to data with timestamp types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('now'));
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('today'));
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('yesterday'));
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('tomorrow'));
-- time zone should be ignored by this data type
INSERT INTO TIMESTAMP_TBL VALUES ('tomorrow EST');
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('tomorrow EST'));
-- [SPARK-29024] Ignore case while resolving time zones
INSERT INTO TIMESTAMP_TBL VALUES ('tomorrow Zulu');
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('tomorrow Zulu'));
SELECT count(*) AS One FROM TIMESTAMP_TBL WHERE d1 = timestamp 'today';
SELECT count(*) AS Three FROM TIMESTAMP_TBL WHERE d1 = timestamp 'tomorrow';
@ -54,7 +58,9 @@ TRUNCATE TABLE TIMESTAMP_TBL;
-- Special values
-- INSERT INTO TIMESTAMP_TBL VALUES ('-infinity');
-- INSERT INTO TIMESTAMP_TBL VALUES ('infinity');
INSERT INTO TIMESTAMP_TBL VALUES ('epoch');
-- PostgreSQL implicitly casts string literals to data with timestamp types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('epoch'));
-- [SPARK-27923] Spark SQL insert there obsolete special values to NULL
-- Obsolete special values
-- INSERT INTO TIMESTAMP_TBL VALUES ('invalid');
@ -73,14 +79,16 @@ INSERT INTO TIMESTAMP_TBL VALUES ('epoch');
-- INSERT INTO TIMESTAMP_TBL VALUES ('Mon Feb 10 17:32:01.6 1997 PST');
-- ISO 8601 format
INSERT INTO TIMESTAMP_TBL VALUES ('1997-01-02');
INSERT INTO TIMESTAMP_TBL VALUES ('1997-01-02 03:04:05');
INSERT INTO TIMESTAMP_TBL VALUES ('1997-02-10 17:32:01-08');
-- PostgreSQL implicitly casts string literals to data with timestamp types, but
-- Spark does not support that kind of implicit casts.
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('1997-01-02'));
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('1997-01-02 03:04:05'));
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('1997-02-10 17:32:01-08'));
-- INSERT INTO TIMESTAMP_TBL VALUES ('1997-02-10 17:32:01-0800');
-- INSERT INTO TIMESTAMP_TBL VALUES ('1997-02-10 17:32:01 -08:00');
-- INSERT INTO TIMESTAMP_TBL VALUES ('19970210 173201 -0800');
-- INSERT INTO TIMESTAMP_TBL VALUES ('1997-06-10 17:32:01 -07:00');
INSERT INTO TIMESTAMP_TBL VALUES ('2001-09-22T18:19:20');
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('2001-09-22T18:19:20'));
-- POSIX format (note that the timezone abbrev is just decoration here)
-- INSERT INTO TIMESTAMP_TBL VALUES ('2000-03-15 08:14:01 GMT+8');

View file

@ -11,7 +11,7 @@ struct<>
-- !query 1
INSERT INTO DATE_TBL VALUES ('1957-04-09')
INSERT INTO DATE_TBL VALUES (date('1957-04-09'))
-- !query 1 schema
struct<>
-- !query 1 output
@ -19,7 +19,7 @@ struct<>
-- !query 2
INSERT INTO DATE_TBL VALUES ('1957-06-13')
INSERT INTO DATE_TBL VALUES (date('1957-06-13'))
-- !query 2 schema
struct<>
-- !query 2 output
@ -27,7 +27,7 @@ struct<>
-- !query 3
INSERT INTO DATE_TBL VALUES ('1996-02-28')
INSERT INTO DATE_TBL VALUES (date('1996-02-28'))
-- !query 3 schema
struct<>
-- !query 3 output
@ -35,7 +35,7 @@ struct<>
-- !query 4
INSERT INTO DATE_TBL VALUES ('1996-02-29')
INSERT INTO DATE_TBL VALUES (date('1996-02-29'))
-- !query 4 schema
struct<>
-- !query 4 output
@ -43,7 +43,7 @@ struct<>
-- !query 5
INSERT INTO DATE_TBL VALUES ('1996-03-01')
INSERT INTO DATE_TBL VALUES (date('1996-03-01'))
-- !query 5 schema
struct<>
-- !query 5 output
@ -51,7 +51,7 @@ struct<>
-- !query 6
INSERT INTO DATE_TBL VALUES ('1996-03-02')
INSERT INTO DATE_TBL VALUES (date('1996-03-02'))
-- !query 6 schema
struct<>
-- !query 6 output
@ -59,7 +59,7 @@ struct<>
-- !query 7
INSERT INTO DATE_TBL VALUES ('1997-02-28')
INSERT INTO DATE_TBL VALUES (date('1997-02-28'))
-- !query 7 schema
struct<>
-- !query 7 output
@ -67,7 +67,7 @@ struct<>
-- !query 8
INSERT INTO DATE_TBL VALUES ('1997-03-01')
INSERT INTO DATE_TBL VALUES (date('1997-03-01'))
-- !query 8 schema
struct<>
-- !query 8 output
@ -75,7 +75,7 @@ struct<>
-- !query 9
INSERT INTO DATE_TBL VALUES ('1997-03-02')
INSERT INTO DATE_TBL VALUES (date('1997-03-02'))
-- !query 9 schema
struct<>
-- !query 9 output
@ -83,7 +83,7 @@ struct<>
-- !query 10
INSERT INTO DATE_TBL VALUES ('2000-04-01')
INSERT INTO DATE_TBL VALUES (date('2000-04-01'))
-- !query 10 schema
struct<>
-- !query 10 output
@ -91,7 +91,7 @@ struct<>
-- !query 11
INSERT INTO DATE_TBL VALUES ('2000-04-02')
INSERT INTO DATE_TBL VALUES (date('2000-04-02'))
-- !query 11 schema
struct<>
-- !query 11 output
@ -99,7 +99,7 @@ struct<>
-- !query 12
INSERT INTO DATE_TBL VALUES ('2000-04-03')
INSERT INTO DATE_TBL VALUES (date('2000-04-03'))
-- !query 12 schema
struct<>
-- !query 12 output
@ -107,7 +107,7 @@ struct<>
-- !query 13
INSERT INTO DATE_TBL VALUES ('2038-04-08')
INSERT INTO DATE_TBL VALUES (date('2038-04-08'))
-- !query 13 schema
struct<>
-- !query 13 output
@ -115,7 +115,7 @@ struct<>
-- !query 14
INSERT INTO DATE_TBL VALUES ('2039-04-09')
INSERT INTO DATE_TBL VALUES (date('2039-04-09'))
-- !query 14 schema
struct<>
-- !query 14 output
@ -123,7 +123,7 @@ struct<>
-- !query 15
INSERT INTO DATE_TBL VALUES ('2040-04-10')
INSERT INTO DATE_TBL VALUES (date('2040-04-10'))
-- !query 15 schema
struct<>
-- !query 15 output
@ -582,7 +582,7 @@ struct<date_part('EPOCH', DATE '1970-01-01'):decimal(20,6)>
-- !query 55
SELECT EXTRACT(EPOCH FROM TIMESTAMP '1970-01-01')
-- !query 55 schema
struct<date_part('EPOCH', TIMESTAMP('1970-01-01 00:00:00')):decimal(20,6)>
struct<date_part('EPOCH', TIMESTAMP '1970-01-01 00:00:00'):decimal(20,6)>
-- !query 55 output
0
@ -790,7 +790,7 @@ true
-- !query 81
SELECT EXTRACT(CENTURY FROM TIMESTAMP '1970-03-20 04:30:00.00000')
-- !query 81 schema
struct<date_part('CENTURY', TIMESTAMP('1970-03-20 04:30:00')):int>
struct<date_part('CENTURY', TIMESTAMP '1970-03-20 04:30:00'):int>
-- !query 81 output
20
@ -798,7 +798,7 @@ struct<date_part('CENTURY', TIMESTAMP('1970-03-20 04:30:00')):int>
-- !query 82
SELECT DATE_TRUNC('MILLENNIUM', TIMESTAMP '1970-03-20 04:30:00.00000')
-- !query 82 schema
struct<date_trunc(MILLENNIUM, TIMESTAMP('1970-03-20 04:30:00')):timestamp>
struct<date_trunc(MILLENNIUM, TIMESTAMP '1970-03-20 04:30:00'):timestamp>
-- !query 82 output
1001-01-01 00:07:02
@ -814,7 +814,7 @@ struct<date_trunc(MILLENNIUM, CAST(DATE '1970-03-20' AS TIMESTAMP)):timestamp>
-- !query 84
SELECT DATE_TRUNC('CENTURY', TIMESTAMP '1970-03-20 04:30:00.00000')
-- !query 84 schema
struct<date_trunc(CENTURY, TIMESTAMP('1970-03-20 04:30:00')):timestamp>
struct<date_trunc(CENTURY, TIMESTAMP '1970-03-20 04:30:00'):timestamp>
-- !query 84 output
1901-01-01 00:00:00

View file

@ -11,7 +11,7 @@ struct<>
-- !query 1
INSERT INTO FLOAT4_TBL VALUES (' 0.0')
INSERT INTO FLOAT4_TBL VALUES (float(' 0.0'))
-- !query 1 schema
struct<>
-- !query 1 output
@ -19,7 +19,7 @@ struct<>
-- !query 2
INSERT INTO FLOAT4_TBL VALUES ('1004.30 ')
INSERT INTO FLOAT4_TBL VALUES (float('1004.30 '))
-- !query 2 schema
struct<>
-- !query 2 output
@ -27,7 +27,7 @@ struct<>
-- !query 3
INSERT INTO FLOAT4_TBL VALUES (' -34.84 ')
INSERT INTO FLOAT4_TBL VALUES (float(' -34.84 '))
-- !query 3 schema
struct<>
-- !query 3 output
@ -35,7 +35,7 @@ struct<>
-- !query 4
INSERT INTO FLOAT4_TBL VALUES ('1.2345678901234e+20')
INSERT INTO FLOAT4_TBL VALUES (float('1.2345678901234e+20'))
-- !query 4 schema
struct<>
-- !query 4 output
@ -43,7 +43,7 @@ struct<>
-- !query 5
INSERT INTO FLOAT4_TBL VALUES ('1.2345678901234e-20')
INSERT INTO FLOAT4_TBL VALUES (float('1.2345678901234e-20'))
-- !query 5 schema
struct<>
-- !query 5 output

View file

@ -11,7 +11,7 @@ struct<>
-- !query 1
INSERT INTO FLOAT8_TBL VALUES (' 0.0 ')
INSERT INTO FLOAT8_TBL VALUES (double(' 0.0 '))
-- !query 1 schema
struct<>
-- !query 1 output
@ -19,7 +19,7 @@ struct<>
-- !query 2
INSERT INTO FLOAT8_TBL VALUES ('1004.30 ')
INSERT INTO FLOAT8_TBL VALUES (double('1004.30 '))
-- !query 2 schema
struct<>
-- !query 2 output
@ -27,7 +27,7 @@ struct<>
-- !query 3
INSERT INTO FLOAT8_TBL VALUES (' -34.84')
INSERT INTO FLOAT8_TBL VALUES (double(' -34.84'))
-- !query 3 schema
struct<>
-- !query 3 output
@ -35,7 +35,7 @@ struct<>
-- !query 4
INSERT INTO FLOAT8_TBL VALUES ('1.2345678901234e+200')
INSERT INTO FLOAT8_TBL VALUES (double('1.2345678901234e+200'))
-- !query 4 schema
struct<>
-- !query 4 output
@ -43,7 +43,7 @@ struct<>
-- !query 5
INSERT INTO FLOAT8_TBL VALUES ('1.2345678901234e-200')
INSERT INTO FLOAT8_TBL VALUES (double('1.2345678901234e-200'))
-- !query 5 schema
struct<>
-- !query 5 output
@ -684,7 +684,7 @@ struct<>
-- !query 76
INSERT INTO FLOAT8_TBL VALUES ('0.0')
INSERT INTO FLOAT8_TBL VALUES (double('0.0'))
-- !query 76 schema
struct<>
-- !query 76 output
@ -692,7 +692,7 @@ struct<>
-- !query 77
INSERT INTO FLOAT8_TBL VALUES ('-34.84')
INSERT INTO FLOAT8_TBL VALUES (double('-34.84'))
-- !query 77 schema
struct<>
-- !query 77 output
@ -700,7 +700,7 @@ struct<>
-- !query 78
INSERT INTO FLOAT8_TBL VALUES ('-1004.30')
INSERT INTO FLOAT8_TBL VALUES (double('-1004.30'))
-- !query 78 schema
struct<>
-- !query 78 output
@ -708,7 +708,7 @@ struct<>
-- !query 79
INSERT INTO FLOAT8_TBL VALUES ('-1.2345678901234e+200')
INSERT INTO FLOAT8_TBL VALUES (double('-1.2345678901234e+200'))
-- !query 79 schema
struct<>
-- !query 79 output
@ -716,7 +716,7 @@ struct<>
-- !query 80
INSERT INTO FLOAT8_TBL VALUES ('-1.2345678901234e-200')
INSERT INTO FLOAT8_TBL VALUES (double('-1.2345678901234e-200'))
-- !query 80 schema
struct<>
-- !query 80 output

View file

@ -11,7 +11,7 @@ struct<>
-- !query 1
INSERT INTO INT2_TBL VALUES (trim('0 '))
INSERT INTO INT2_TBL VALUES (smallint(trim('0 ')))
-- !query 1 schema
struct<>
-- !query 1 output
@ -19,7 +19,7 @@ struct<>
-- !query 2
INSERT INTO INT2_TBL VALUES (trim(' 1234 '))
INSERT INTO INT2_TBL VALUES (smallint(trim(' 1234 ')))
-- !query 2 schema
struct<>
-- !query 2 output
@ -27,7 +27,7 @@ struct<>
-- !query 3
INSERT INTO INT2_TBL VALUES (trim(' -1234'))
INSERT INTO INT2_TBL VALUES (smallint(trim(' -1234')))
-- !query 3 schema
struct<>
-- !query 3 output
@ -35,7 +35,7 @@ struct<>
-- !query 4
INSERT INTO INT2_TBL VALUES ('32767')
INSERT INTO INT2_TBL VALUES (smallint('32767'))
-- !query 4 schema
struct<>
-- !query 4 output
@ -43,7 +43,7 @@ struct<>
-- !query 5
INSERT INTO INT2_TBL VALUES ('-32767')
INSERT INTO INT2_TBL VALUES (smallint('-32767'))
-- !query 5 schema
struct<>
-- !query 5 output

View file

@ -11,7 +11,7 @@ struct<>
-- !query 1
INSERT INTO INT4_TBL VALUES (trim(' 0 '))
INSERT INTO INT4_TBL VALUES (int(trim(' 0 ')))
-- !query 1 schema
struct<>
-- !query 1 output
@ -19,7 +19,7 @@ struct<>
-- !query 2
INSERT INTO INT4_TBL VALUES (trim('123456 '))
INSERT INTO INT4_TBL VALUES (int(trim('123456 ')))
-- !query 2 schema
struct<>
-- !query 2 output
@ -27,7 +27,7 @@ struct<>
-- !query 3
INSERT INTO INT4_TBL VALUES (trim(' -123456'))
INSERT INTO INT4_TBL VALUES (int(trim(' -123456')))
-- !query 3 schema
struct<>
-- !query 3 output
@ -35,7 +35,7 @@ struct<>
-- !query 4
INSERT INTO INT4_TBL VALUES ('2147483647')
INSERT INTO INT4_TBL VALUES (int('2147483647'))
-- !query 4 schema
struct<>
-- !query 4 output
@ -43,7 +43,7 @@ struct<>
-- !query 5
INSERT INTO INT4_TBL VALUES ('-2147483647')
INSERT INTO INT4_TBL VALUES (int('-2147483647'))
-- !query 5 schema
struct<>
-- !query 5 output

View file

@ -11,7 +11,7 @@ struct<>
-- !query 1
INSERT INTO INT8_TBL VALUES(trim(' 123 '),trim(' 456'))
INSERT INTO INT8_TBL VALUES(bigint(trim(' 123 ')),bigint(trim(' 456')))
-- !query 1 schema
struct<>
-- !query 1 output
@ -19,7 +19,7 @@ struct<>
-- !query 2
INSERT INTO INT8_TBL VALUES(trim('123 '),'4567890123456789')
INSERT INTO INT8_TBL VALUES(bigint(trim('123 ')),bigint('4567890123456789'))
-- !query 2 schema
struct<>
-- !query 2 output
@ -27,7 +27,7 @@ struct<>
-- !query 3
INSERT INTO INT8_TBL VALUES('4567890123456789','123')
INSERT INTO INT8_TBL VALUES(bigint('4567890123456789'),bigint('123'))
-- !query 3 schema
struct<>
-- !query 3 output
@ -35,7 +35,7 @@ struct<>
-- !query 4
INSERT INTO INT8_TBL VALUES(+4567890123456789,'4567890123456789')
INSERT INTO INT8_TBL VALUES(+4567890123456789,bigint('4567890123456789'))
-- !query 4 schema
struct<>
-- !query 4 output
@ -43,7 +43,7 @@ struct<>
-- !query 5
INSERT INTO INT8_TBL VALUES('+4567890123456789','-4567890123456789')
INSERT INTO INT8_TBL VALUES(bigint('+4567890123456789'),bigint('-4567890123456789'))
-- !query 5 schema
struct<>
-- !query 5 output

View file

@ -11,7 +11,7 @@ struct<>
-- !query 1
INSERT INTO TIMESTAMP_TBL VALUES ('now')
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('now'))
-- !query 1 schema
struct<>
-- !query 1 output
@ -19,7 +19,7 @@ struct<>
-- !query 2
INSERT INTO TIMESTAMP_TBL VALUES ('now')
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('now'))
-- !query 2 schema
struct<>
-- !query 2 output
@ -27,7 +27,7 @@ struct<>
-- !query 3
INSERT INTO TIMESTAMP_TBL VALUES ('today')
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('today'))
-- !query 3 schema
struct<>
-- !query 3 output
@ -35,7 +35,7 @@ struct<>
-- !query 4
INSERT INTO TIMESTAMP_TBL VALUES ('yesterday')
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('yesterday'))
-- !query 4 schema
struct<>
-- !query 4 output
@ -43,7 +43,7 @@ struct<>
-- !query 5
INSERT INTO TIMESTAMP_TBL VALUES ('tomorrow')
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('tomorrow'))
-- !query 5 schema
struct<>
-- !query 5 output
@ -51,7 +51,7 @@ struct<>
-- !query 6
INSERT INTO TIMESTAMP_TBL VALUES ('tomorrow EST')
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('tomorrow EST'))
-- !query 6 schema
struct<>
-- !query 6 output
@ -59,7 +59,7 @@ struct<>
-- !query 7
INSERT INTO TIMESTAMP_TBL VALUES ('tomorrow Zulu')
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('tomorrow Zulu'))
-- !query 7 schema
struct<>
-- !query 7 output
@ -99,7 +99,7 @@ struct<>
-- !query 12
INSERT INTO TIMESTAMP_TBL VALUES ('epoch')
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('epoch'))
-- !query 12 schema
struct<>
-- !query 12 output
@ -107,7 +107,7 @@ struct<>
-- !query 13
INSERT INTO TIMESTAMP_TBL VALUES ('1997-01-02')
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('1997-01-02'))
-- !query 13 schema
struct<>
-- !query 13 output
@ -115,7 +115,7 @@ struct<>
-- !query 14
INSERT INTO TIMESTAMP_TBL VALUES ('1997-01-02 03:04:05')
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('1997-01-02 03:04:05'))
-- !query 14 schema
struct<>
-- !query 14 output
@ -123,7 +123,7 @@ struct<>
-- !query 15
INSERT INTO TIMESTAMP_TBL VALUES ('1997-02-10 17:32:01-08')
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('1997-02-10 17:32:01-08'))
-- !query 15 schema
struct<>
-- !query 15 output
@ -131,7 +131,7 @@ struct<>
-- !query 16
INSERT INTO TIMESTAMP_TBL VALUES ('2001-09-22T18:19:20')
INSERT INTO TIMESTAMP_TBL VALUES (timestamp('2001-09-22T18:19:20'))
-- !query 16 schema
struct<>
-- !query 16 output
@ -219,11 +219,11 @@ SELECT '' AS `54`, d1 - timestamp '1997-01-02' AS diff
-- !query 24 schema
struct<54:string,diff:interval>
-- !query 24 output
interval -236720 hours
interval 0 microseconds
interval 3 hours 4 minutes 5 seconds
interval 41393 hours 19 minutes 20 seconds
interval 953 hours 32 minutes 1 seconds
-236720 hours
0 seconds
3 hours 4 minutes 5 seconds
41393 hours 19 minutes 20 seconds
953 hours 32 minutes 1 seconds
-- !query 25
@ -242,11 +242,11 @@ SELECT '' AS `54`, d1 - timestamp '1997-01-02' AS diff
-- !query 26 schema
struct<54:string,diff:interval>
-- !query 26 output
interval -236720 hours
interval 0 microseconds
interval 3 hours 4 minutes 5 seconds
interval 41393 hours 19 minutes 20 seconds
interval 953 hours 32 minutes 1 seconds
-236720 hours
0 seconds
3 hours 4 minutes 5 seconds
41393 hours 19 minutes 20 seconds
953 hours 32 minutes 1 seconds
-- !query 27

View file

@ -143,19 +143,7 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession {
/** List of test cases to ignore, in lower cases. */
protected def blackList: Set[String] = Set(
"blacklist.sql", // Do NOT remove this one. It is here to test the blacklist functionality.
// SPARK-28885 String value is not allowed to be stored as numeric type with
// ANSI store assignment policy.
"postgreSQL/numeric.sql",
"postgreSQL/int2.sql",
"postgreSQL/int4.sql",
"postgreSQL/int8.sql",
"postgreSQL/float4.sql",
"postgreSQL/float8.sql",
// SPARK-28885 String value is not allowed to be stored as date/timestamp type with
// ANSI store assignment policy.
"postgreSQL/date.sql",
"postgreSQL/timestamp.sql"
"blacklist.sql" // Do NOT remove this one. It is here to test the blacklist functionality.
)
// Create all the test cases.