[SPARK-35871][SQL] Literal.create(value, dataType) should support fields
### What changes were proposed in this pull request? The current Literal.create(data, dataType) is not correct for converting a Period to YearMonthIntervalType or a Duration to DayTimeIntervalType. If the value is a Period/Duration, it creates a converter for the default YearMonthIntervalType/DayTimeIntervalType, so the result is incorrect; this PR fixes that bug. ### Why are the changes needed? To fix a bug in Literal.create(). ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? Added a unit test. Closes #33056 from AngersZhuuuu/SPARK-35871. Authored-by: Angerszhuuuu <angers.zhu@gmail.com> Signed-off-by: Max Gekk <max.gekk@gmail.com>
This commit is contained in:
parent
d40a1a2552
commit
de35675c61
|
@ -153,7 +153,13 @@ object Literal {
|
||||||
/** Wraps `obj` as a [[Literal]] whose data type is an `ObjectType` of the value's runtime class. */
def fromObject(obj: Any): Literal = new Literal(obj, ObjectType(obj.getClass))
|
def fromObject(obj: Any): Literal = new Literal(obj, ObjectType(obj.getClass))
|
||||||
|
|
||||||
/**
 * Creates a [[Literal]] holding `v` converted to Catalyst's internal representation
 * for the given `dataType`.
 *
 * `java.time.Period` / `java.time.Duration` values paired with a year-month /
 * day-time interval type need a converter built for that exact type, so that the
 * interval's declared fields are honored (SPARK-35871); everything else goes
 * through the generic conversion.
 */
def create(v: Any, dataType: DataType): Literal = {
  val catalystValue = (v, dataType) match {
    case (_: Period, _: YearMonthIntervalType) | (_: Duration, _: DayTimeIntervalType) =>
      CatalystTypeConverters.createToCatalystConverter(dataType)(v)
    case _ =>
      CatalystTypeConverters.convertToCatalyst(v)
  }
  Literal(catalystValue, dataType)
}
|
||||||
|
|
||||||
def create[T : TypeTag](v: T): Literal = Try {
|
def create[T : TypeTag](v: T): Literal = Try {
|
||||||
|
|
|
@ -32,6 +32,8 @@ import org.apache.spark.sql.catalyst.util.DateTimeConstants._
|
||||||
import org.apache.spark.sql.catalyst.util.DateTimeUtils
|
import org.apache.spark.sql.catalyst.util.DateTimeUtils
|
||||||
import org.apache.spark.sql.internal.SQLConf
|
import org.apache.spark.sql.internal.SQLConf
|
||||||
import org.apache.spark.sql.types._
|
import org.apache.spark.sql.types._
|
||||||
|
import org.apache.spark.sql.types.DayTimeIntervalType._
|
||||||
|
import org.apache.spark.sql.types.YearMonthIntervalType._
|
||||||
import org.apache.spark.unsafe.types.CalendarInterval
|
import org.apache.spark.unsafe.types.CalendarInterval
|
||||||
|
|
||||||
class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
|
class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
|
||||||
|
@ -432,4 +434,26 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
|
||||||
assert(literal.toString === expected)
|
assert(literal.toString === expected)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
test("SPARK-35871: Literal.create(value, dataType) should support fields") {
  // 13 months = 1 year + 1 month; an interval ending at YEAR keeps only the
  // 12 whole-year months, while one ending at MONTH keeps all 13.
  val thirteenMonths = Period.ofMonths(13)
  for (intervalType <- DataTypeTestUtils.yearMonthIntervalTypes) {
    val expectedMonths = intervalType.endField match {
      case YEAR => 12
      case MONTH => 13
    }
    checkEvaluation(Literal.create(thirteenMonths, intervalType), expectedMonths)
  }

  // 1 day + 1 hour + 1 minute + 1 second; the expected microsecond count is
  // truncated at the interval's end field.
  val oneOfEachUnit = Duration.ofSeconds(86400 + 3600 + 60 + 1)
  for (intervalType <- DataTypeTestUtils.dayTimeIntervalTypes) {
    val expectedMicros = intervalType.endField match {
      case DAY => 86400000000L
      case HOUR => 90000000000L
      case MINUTE => 90060000000L
      case SECOND => 90061000000L
    }
    checkEvaluation(Literal.create(oneOfEachUnit, intervalType), expectedMicros)
  }
}
|
||||||
}
|
}
|
||||||
|
|
Loading…
Reference in a new issue