[SPARK-22894][SQL] DateTimeOperations should accept SQL like string type

## What changes were proposed in this pull request?

`DateTimeOperations` accepts [`StringType`](ae998ec2b5/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala (L669)), but:

```
spark-sql> SELECT '2017-12-24' + interval 2 months 2 seconds;
Error in query: cannot resolve '(CAST('2017-12-24' AS DOUBLE) + interval 2 months 2 seconds)' due to data type mismatch: differing types in '(CAST('2017-12-24' AS DOUBLE) + interval 2 months 2 seconds)' (double and calendarinterval).; line 1 pos 7;
'Project [unresolvedalias((cast(2017-12-24 as double) + interval 2 months 2 seconds), None)]
+- OneRowRelation
spark-sql>
```

After this PR:
```
spark-sql> SELECT '2017-12-24' + interval 2 months 2 seconds;
2018-02-24 00:00:02
Time taken: 0.2 seconds, Fetched 1 row(s)

```

## How was this patch tested?

unit tests

Author: Yuming Wang <wgyumg@gmail.com>

Closes #20067 from wangyum/SPARK-22894.
This commit is contained in:
Yuming Wang 2017-12-26 09:40:41 -08:00 committed by gatorsmile
parent 9348e68420
commit 91d1b300d4
2 changed files with 16 additions and 3 deletions

View file

@@ -324,9 +324,11 @@ object TypeCoercion {
     // Skip nodes who's children have not been resolved yet.
     case e if !e.childrenResolved => e

-    case a @ BinaryArithmetic(left @ StringType(), right) =>
+    case a @ BinaryArithmetic(left @ StringType(), right)
+        if right.dataType != CalendarIntervalType =>
       a.makeCopy(Array(Cast(left, DoubleType), right))
-    case a @ BinaryArithmetic(left, right @ StringType()) =>
+    case a @ BinaryArithmetic(left, right @ StringType())
+        if left.dataType != CalendarIntervalType =>
       a.makeCopy(Array(left, Cast(right, DoubleType)))
     // For equality between string and timestamp we cast the string to a timestamp

View file

@@ -20,7 +20,7 @@ package org.apache.spark.sql
import java.io.File
import java.math.MathContext
import java.net.{MalformedURLException, URL}
-import java.sql.Timestamp
+import java.sql.{Date, Timestamp}
import java.util.concurrent.atomic.AtomicBoolean
import org.apache.spark.{AccumulatorSuite, SparkException}
@@ -2760,6 +2760,17 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
     }
   }

+  test("SPARK-22894: DateTimeOperations should accept SQL like string type") {
+    val date = "2017-12-24"
+    val str = sql(s"SELECT CAST('$date' as STRING) + interval 2 months 2 seconds")
+    val dt = sql(s"SELECT CAST('$date' as DATE) + interval 2 months 2 seconds")
+    val ts = sql(s"SELECT CAST('$date' as TIMESTAMP) + interval 2 months 2 seconds")
+    checkAnswer(str, Row("2018-02-24 00:00:02") :: Nil)
+    checkAnswer(dt, Row(Date.valueOf("2018-02-24")) :: Nil)
+    checkAnswer(ts, Row(Timestamp.valueOf("2018-02-24 00:00:02")) :: Nil)
+  }
// Only New OrcFileFormat supports this
Seq(classOf[org.apache.spark.sql.execution.datasources.orc.OrcFileFormat].getCanonicalName,
"parquet").foreach { format =>