diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonGenerator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonGenerator.scala
index 9777d562c5..9bd05460ac 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonGenerator.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonGenerator.scala
@@ -156,6 +156,15 @@ private[sql] class JacksonGenerator(
           end)
         gen.writeString(ymString)
 
+    case DayTimeIntervalType(start, end) =>
+      (row: SpecializedGetters, ordinal: Int) =>
+        val dtString = IntervalUtils.toDayTimeIntervalString(
+          row.getLong(ordinal),
+          IntervalStringStyles.ANSI_STYLE,
+          start,
+          end)
+        gen.writeString(dtString)
+
     case BinaryType =>
       (row: SpecializedGetters, ordinal: Int) =>
         gen.writeBinary(row.getBinary(ordinal))
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala
index 2aa735d8eb..8a1191c5b7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala
@@ -302,6 +302,13 @@ class JacksonParser(
           Integer.valueOf(expr.eval(EmptyRow).asInstanceOf[Int])
       }
 
+    case dt: DayTimeIntervalType => (parser: JsonParser) =>
+      parseJsonToken[java.lang.Long](parser, dataType) {
+        case VALUE_STRING =>
+          val expr = Cast(Literal(parser.getText), dt)
+          java.lang.Long.valueOf(expr.eval(EmptyRow).asInstanceOf[Long])
+      }
+
     case st: StructType =>
       val fieldConverters = st.map(_.dataType).map(makeConverter).toArray
       (parser: JsonParser) => parseJsonToken[InternalRow](parser, dataType) {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
index c2bea8cbf1..82cca2b737 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.sql
 
 import java.text.SimpleDateFormat
-import java.time.Period
+import java.time.{Duration, Period}
 import java.util.Locale
 
 import collection.JavaConverters._
@@ -28,6 +28,7 @@ import org.apache.spark.sql.functions._
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types._
+import org.apache.spark.sql.types.DayTimeIntervalType.{DAY, HOUR, MINUTE, SECOND}
 import org.apache.spark.sql.types.YearMonthIntervalType.{MONTH, YEAR}
 
 class JsonFunctionsSuite extends QueryTest with SharedSparkSession {
@@ -847,4 +848,45 @@ class JsonFunctionsSuite extends QueryTest with SharedSparkSession {
       }
     }
   }
+
+  test("SPARK-35983: from_json/to_json for map types where value types are day-time intervals") {
+    val dtDF = Seq(Duration.ofDays(1).plusHours(2).plusMinutes(3).plusSeconds(4)).toDF
+    Seq(
+      (DayTimeIntervalType(), """{"key":"INTERVAL '1 02:03:04' DAY TO SECOND"}""",
+        Duration.ofDays(1).plusHours(2).plusMinutes(3).plusSeconds(4)),
+      (DayTimeIntervalType(DAY, MINUTE), """{"key":"INTERVAL '1 02:03' DAY TO MINUTE"}""",
+        Duration.ofDays(1).plusHours(2).plusMinutes(3)),
+      (DayTimeIntervalType(DAY, HOUR), """{"key":"INTERVAL '1 02' DAY TO HOUR"}""",
+        Duration.ofDays(1).plusHours(2)),
+      (DayTimeIntervalType(DAY), """{"key":"INTERVAL '1' DAY"}""",
+        Duration.ofDays(1)),
+      (DayTimeIntervalType(HOUR, SECOND), """{"key":"INTERVAL '26:03:04' HOUR TO SECOND"}""",
+        Duration.ofHours(26).plusMinutes(3).plusSeconds(4)),
+      (DayTimeIntervalType(HOUR, MINUTE), """{"key":"INTERVAL '26:03' HOUR TO MINUTE"}""",
+        Duration.ofHours(26).plusMinutes(3)),
+      (DayTimeIntervalType(HOUR), """{"key":"INTERVAL '26' HOUR"}""",
+        Duration.ofHours(26)),
+      (DayTimeIntervalType(MINUTE, SECOND), """{"key":"INTERVAL '1563:04' MINUTE TO SECOND"}""",
+        Duration.ofMinutes(1563).plusSeconds(4)),
+      (DayTimeIntervalType(MINUTE), """{"key":"INTERVAL '1563' MINUTE"}""",
+        Duration.ofMinutes(1563)),
+      (DayTimeIntervalType(SECOND), """{"key":"INTERVAL '93784' SECOND"}""",
+        Duration.ofSeconds(93784))
+    ).foreach { case (toJsonDtype, toJsonExpected, fromJsonExpected) =>
+      val toJsonDF = dtDF.select(to_json(map(lit("key"), $"value" cast toJsonDtype)) as "json")
+      checkAnswer(toJsonDF, Row(toJsonExpected))
+
+      DataTypeTestUtils.dayTimeIntervalTypes.foreach { fromJsonDtype =>
+        val fromJsonDF = toJsonDF
+          .select(
+            from_json($"json", StructType(StructField("key", fromJsonDtype) :: Nil)) as "value")
+          .selectExpr("value['key']")
+        if (toJsonDtype == fromJsonDtype) {
+          checkAnswer(fromJsonDF, Row(fromJsonExpected))
+        } else {
+          checkAnswer(fromJsonDF, Row(null))
+        }
+      }
+    }
+  }
 }
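For reference, a minimal usage sketch (not part of the patch) of the behavior this change enables, assuming a spark-shell session on a build that includes the patch; the expected output mirrors the test cases above:

// Sketch only: assumes spark-shell, where `spark` and its implicits exist.
import java.time.Duration
import org.apache.spark.sql.functions.{from_json, lit, map, to_json}
import org.apache.spark.sql.types.{DayTimeIntervalType, StructField, StructType}
import spark.implicits._

val df = Seq(Duration.ofDays(1).plusHours(2).plusMinutes(3).plusSeconds(4)).toDF("value")

// to_json now renders the day-time interval via the new JacksonGenerator branch.
val json = df.select(to_json(map(lit("key"), $"value")) as "json")
json.show(false)
// expected, per the test above: {"key":"INTERVAL '1 02:03:04' DAY TO SECOND"}

// from_json casts the ANSI interval string back through the new JacksonParser branch.
val parsed = json
  .select(from_json($"json", StructType(StructField("key", DayTimeIntervalType()) :: Nil)) as "value")
  .selectExpr("value['key']")
parsed.show(false)
// expected: the original 1 day 2 hours 3 minutes 4 seconds interval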