diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala
index 04a0f1a6fc..3a2d395dd1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala
@@ -330,12 +330,13 @@ class JacksonParser(
     case udt: UserDefinedType[_] =>
       makeConverter(udt.sqlType)
 
-    case _ =>
-      (parser: JsonParser) =>
-        // Here, we pass empty `PartialFunction` so that this case can be
-        // handled as a failed conversion. It will throw an exception as
-        // long as the value is not null.
-        parseJsonToken[AnyRef](parser, dataType)(PartialFunction.empty[JsonToken, AnyRef])
+    case _: NullType =>
+      (parser: JsonParser) => parseJsonToken[java.lang.Long](parser, dataType) {
+        case _ => null
+      }
+
+    // We don't actually hit this exception, but we keep it for understandability.
+    case _ => throw QueryExecutionErrors.unsupportedTypeError(dataType)
   }
 
   /**
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index fe83675617..fae7721542 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -661,9 +661,10 @@ You may get a different result due to the upgrading of Spark 3.0: Fail to recogn
 -- !query
 select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat', 'dd/MMMMM/yyyy'))
 -- !query schema
-struct<from_json({"t":"26/October/2015"}):struct<t:timestamp_ntz>>
+struct<>
 -- !query output
-{"t":null}
+java.lang.Exception
+Unsupported type: timestamp_ntz
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
index b8a68005eb..c6de535807 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
@@ -642,9 +642,10 @@ You may get a different result due to the upgrading of Spark 3.0: Fail to recogn
 -- !query
 select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat', 'dd/MMMMM/yyyy'))
 -- !query schema
-struct<from_json({"t":"26/October/2015"}):struct<t:timestamp_ntz>>
+struct<>
 -- !query output
-{"t":null}
+java.lang.Exception
+Unsupported type: timestamp_ntz
 
 
 -- !query
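For readers skimming the first hunk, here is a minimal, self-contained sketch of the new dispatch in `makeConverter`: a `NullType` field now converts every token to `null`, and any type without a dedicated converter fails fast with an unsupported-type error, which is what surfaces as `Unsupported type: timestamp_ntz` in the updated golden files above. The `DataType` objects and the exception below are simplified stand-ins for Catalyst's internals, not the real classes.

```scala
// Standalone sketch of the converter dispatch this patch introduces.
// DataType, NullType, TimestampNTZType, and the exception here are
// assumptions: simplified stand-ins for Catalyst's actual types.
sealed trait DataType
case object NullType extends DataType
case object TimestampNTZType extends DataType

object ConverterSketch {
  // A converter turns one raw JSON token (modeled here as a String) into a value.
  type ValueConverter = String => Any

  def makeConverter(dataType: DataType): ValueConverter = dataType match {
    // New behavior: NullType converts every token to null, instead of relying
    // on an empty PartialFunction to register as a failed conversion.
    case NullType => (_: String) => null

    // Any remaining type now fails when the converter is *built*, not later
    // per row while parsing.
    case other => throw new UnsupportedOperationException(s"Unsupported type: $other")
  }

  def main(args: Array[String]): Unit = {
    println(makeConverter(NullType)("\"anything\"")) // prints: null
    makeConverter(TimestampNTZType)                  // throws: Unsupported type: TimestampNTZType
  }
}
```

One consequence of moving the failure into `makeConverter` is visible in the `.sql.out` changes: the query no longer produces a schema and a `{"t":null}` row, but errors out before any row is parsed, so the expected schema becomes `struct<>`.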