diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala
index fe1fa87891..dfa746f7c7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala
@@ -330,8 +330,12 @@ class JacksonParser(
     case udt: UserDefinedType[_] =>
       makeConverter(udt.sqlType)
 
-    // We don't actually hit this exception though, we keep it for understandability
-    case _ => throw QueryExecutionErrors.unsupportedTypeError(dataType)
+    case _ =>
+      (parser: JsonParser) =>
+        // Here, we pass empty `PartialFunction` so that this case can be
+        // handled as a failed conversion. It will throw an exception as
+        // long as the value is not null.
+        parseJsonToken[AnyRef](parser, dataType)(PartialFunction.empty[JsonToken, AnyRef])
   }
 
   /**
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index fae7721542..fe83675617 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -661,10 +661,9 @@ You may get a different result due to the upgrading of Spark 3.0: Fail to recogn
 -- !query
 select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat', 'dd/MMMMM/yyyy'))
 -- !query schema
-struct<>
+struct<from_json({"t":"26/October/2015"}):struct<t:timestamp_ntz>>
 -- !query output
-java.lang.Exception
-Unsupported type: timestamp_ntz
+{"t":null}
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
index c6de535807..b8a68005eb 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
@@ -642,10 +642,9 @@ You may get a different result due to the upgrading of Spark 3.0: Fail to recogn
 -- !query
 select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat', 'dd/MMMMM/yyyy'))
 -- !query schema
-struct<>
+struct<from_json({"t":"26/October/2015"}):struct<t:timestamp_ntz>>
 -- !query output
-java.lang.Exception
-Unsupported type: timestamp_ntz
+{"t":null}
 
 
 -- !query
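
The fix leans on a standard Scala idiom: `PartialFunction.empty` is defined for no input, so `parseJsonToken` routes every non-null value for an unsupported type to its failure branch, which PERMISSIVE parsing renders as a null field (hence `{"t":null}` in the updated golden files). The following is a minimal standalone sketch of that mechanism only; `convert` is a hypothetical stand-in for `parseJsonToken`, not Spark code.

// Hypothetical sketch, not Spark's parseJsonToken: model a converter that
// applies a PartialFunction to a token and treats "not defined" as a failed
// conversion yielding None (which PERMISSIVE mode would render as null).
object EmptyPartialFunctionSketch {
  def convert[T](token: String)(pf: PartialFunction[String, T]): Option[T] =
    // `lift` turns the partial function into a total one returning Option:
    // None whenever pf.isDefinedAt(token) is false.
    pf.lift(token)

  def main(args: Array[String]): Unit = {
    // An empty PartialFunction is defined for no input at all...
    println(PartialFunction.empty[String, AnyRef].isDefinedAt("26/October/2015")) // false
    // ...so every token takes the failure path and the field becomes None/null.
    println(convert("26/October/2015")(PartialFunction.empty))                    // None
  }
}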