From c0d84e6cf1046b7944796038414ef21fe9c7e3b5 Mon Sep 17 00:00:00 2001 From: gengjiaan Date: Tue, 20 Jul 2021 19:16:56 +0300 Subject: [PATCH] [SPARK-36222][SQL] Step by days in the Sequence expression for dates ### What changes were proposed in this pull request? The current implementation of the `Sequence` expression does not support stepping by days for dates. ``` spark-sql> select sequence(date'2021-07-01', date'2021-07-10', interval '3' day); Error in query: cannot resolve 'sequence(DATE '2021-07-01', DATE '2021-07-10', INTERVAL '3' DAY)' due to data type mismatch: sequence uses the wrong parameter type. The parameter type must conform to: 1. The start and stop expressions must resolve to the same type. 2. If start and stop expressions resolve to the 'date' or 'timestamp' type then the step expression must resolve to the 'interval' or 'interval year to month' or 'interval day to second' type, otherwise to the same type as the start and stop expressions. ; line 1 pos 7; 'Project [unresolvedalias(sequence(2021-07-01, 2021-07-10, Some(INTERVAL '3' DAY), Some(Europe/Moscow)), None)] +- OneRowRelation ``` ### Why are the changes needed? A `DayTimeInterval` with day granularity should be usable as a step for dates. ### Does this PR introduce _any_ user-facing change? Yes. The `Sequence` expression will support stepping by a `DayTimeInterval` with day granularity for dates. ### How was this patch tested? New tests. Closes #33439 from beliefer/SPARK-36222. 
Authored-by: gengjiaan Signed-off-by: Max Gekk --- .../expressions/collectionOperations.scala | 17 ++++-- .../CollectionExpressionsSuite.scala | 61 +++++++++++++++++-- 2 files changed, 68 insertions(+), 10 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala index 730b8d0f34..2c3312a826 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala @@ -2574,7 +2574,8 @@ case class Sequence( DayTimeIntervalType.acceptsType(stepType) case DateType => stepOpt.isEmpty || CalendarIntervalType.acceptsType(stepType) || - YearMonthIntervalType.acceptsType(stepType) + YearMonthIntervalType.acceptsType(stepType) || + DayTimeIntervalType.acceptsType(stepType) case _: IntegralType => stepOpt.isEmpty || stepType.sameType(startType) case _ => false @@ -2626,8 +2627,10 @@ case class Sequence( case DateType => if (stepOpt.isEmpty || CalendarIntervalType.acceptsType(stepOpt.get.dataType)) { new TemporalSequenceImpl[Int](IntegerType, start.dataType, MICROS_PER_DAY, _.toInt, zoneId) - } else { + } else if (YearMonthIntervalType.acceptsType(stepOpt.get.dataType)) { new PeriodSequenceImpl[Int](IntegerType, start.dataType, MICROS_PER_DAY, _.toInt, zoneId) + } else { + new DurationSequenceImpl[Int](IntegerType, start.dataType, MICROS_PER_DAY, _.toInt, zoneId) } } @@ -2807,15 +2810,19 @@ object Sequence { val intervalType: DataType = DayTimeIntervalType() def splitStep(input: Any): (Int, Int, Long) = { - (0, 0, input.asInstanceOf[Long]) + val duration = input.asInstanceOf[Long] + val days = IntervalUtils.getDays(duration) + val micros = duration - days * MICROS_PER_DAY + (0, days, micros) } def stepSplitCode( stepMonths: String, stepDays: String, stepMicros: String, step: 
String): String = { s""" |final int $stepMonths = 0; - |final int $stepDays = 0; - |final long $stepMicros = $step; + |final int $stepDays = + | (int) org.apache.spark.sql.catalyst.util.IntervalUtils.getDays($step); + |final long $stepMicros = $step - $stepDays * ${MICROS_PER_DAY}L; """.stripMargin } } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala index bfecbf5766..caa5e9655c 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala @@ -919,6 +919,16 @@ class CollectionExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper Date.valueOf("2020-11-01"), Date.valueOf("2022-04-01"))) + checkEvaluation(new Sequence( + Literal(Date.valueOf("2021-07-01")), + Literal(Date.valueOf("2021-07-10")), + Literal(fromDayTimeString("3 0:0:0"))), + Seq( + Date.valueOf("2021-07-01"), + Date.valueOf("2021-07-04"), + Date.valueOf("2021-07-07"), + Date.valueOf("2021-07-10"))) + checkExceptionInExpression[IllegalArgumentException]( new Sequence( Literal(Date.valueOf("1970-01-02")), @@ -1092,6 +1102,52 @@ class CollectionExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper Date.valueOf("2020-11-01"), Date.valueOf("2022-04-01"))) + checkEvaluation(new Sequence( + Literal(Date.valueOf("2021-07-01")), + Literal(Date.valueOf("2021-07-10")), + Literal(Duration.ofDays(3))), + Seq( + Date.valueOf("2021-07-01"), + Date.valueOf("2021-07-04"), + Date.valueOf("2021-07-07"), + Date.valueOf("2021-07-10"))) + + checkExceptionInExpression[IllegalArgumentException]( + new Sequence( + Literal(Date.valueOf("2021-07-01")), + Literal(Date.valueOf("2021-07-10")), + Literal(Duration.ofHours(3))), + EmptyRow, + "sequence step must be an interval day to 
second of day granularity" + + " if start and end values are dates") + + checkExceptionInExpression[IllegalArgumentException]( + new Sequence( + Literal(Date.valueOf("2021-07-01")), + Literal(Date.valueOf("2021-07-10")), + Literal(Duration.ofMinutes(3))), + EmptyRow, + "sequence step must be an interval day to second of day granularity" + + " if start and end values are dates") + + checkExceptionInExpression[IllegalArgumentException]( + new Sequence( + Literal(Date.valueOf("2021-07-01")), + Literal(Date.valueOf("2021-07-10")), + Literal(Duration.ofSeconds(3))), + EmptyRow, + "sequence step must be an interval day to second of day granularity" + + " if start and end values are dates") + + checkExceptionInExpression[IllegalArgumentException]( + new Sequence( + Literal(Date.valueOf("2021-07-01")), + Literal(Date.valueOf("2021-07-10")), + Literal(Duration.ofMillis(3))), + EmptyRow, + "sequence step must be an interval day to second of day granularity" + + " if start and end values are dates") + checkExceptionInExpression[IllegalArgumentException]( new Sequence( Literal(Date.valueOf("2018-01-01")), @@ -1108,11 +1164,6 @@ class CollectionExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper Literal(Period.ofMonths(-1))), EmptyRow, s"sequence boundaries: 0 to 2678400000000 by -1") - - assert(Sequence( - Cast(Literal("2011-03-01"), DateType), - Cast(Literal("2011-04-01"), DateType), - Option(Literal(Duration.ofHours(1)))).checkInputDataTypes().isFailure) } }