[SPARK-4513][SQL] Support relational operator '<=>' in Spark SQL
The relational operator '<=>' is not working in Spark SQL, while the same works in Spark HiveQL. Author: ravipesala <ravindra.pesala@huawei.com>. Closes #3387 from ravipesala/<=> and squashes the following commits: 7198e90 [ravipesala] Supporting relational operator '<=>' in Spark SQL
This commit is contained in:
parent
1c53a5db99
commit
98e9419784
|
@@ -61,7 +61,7 @@ class SqlLexical(val keywords: Seq[String]) extends StdLexical {
|
|||
|
||||
delimiters += (
|
||||
"@", "*", "+", "-", "<", "=", "<>", "!=", "<=", ">=", ">", "/", "(", ")",
|
||||
",", ";", "%", "{", "}", ":", "[", "]", ".", "&", "|", "^", "~"
|
||||
",", ";", "%", "{", "}", ":", "[", "]", ".", "&", "|", "^", "~", "<=>"
|
||||
)
|
||||
|
||||
override lazy val token: Parser[Token] =
|
||||
|
|
|
@@ -234,6 +234,7 @@ class SqlParser extends AbstractSparkSQLParser {
|
|||
| termExpression ~ (">=" ~> termExpression) ^^ { case e1 ~ e2 => GreaterThanOrEqual(e1, e2) }
|
||||
| termExpression ~ ("!=" ~> termExpression) ^^ { case e1 ~ e2 => Not(EqualTo(e1, e2)) }
|
||||
| termExpression ~ ("<>" ~> termExpression) ^^ { case e1 ~ e2 => Not(EqualTo(e1, e2)) }
|
||||
| termExpression ~ ("<=>" ~> termExpression) ^^ { case e1 ~ e2 => EqualNullSafe(e1, e2) }
|
||||
| termExpression ~ NOT.? ~ (BETWEEN ~> termExpression) ~ (AND ~> termExpression) ^^ {
|
||||
case e ~ not ~ el ~ eu =>
|
||||
val betweenExpr: Expression = And(GreaterThanOrEqual(e, el), LessThanOrEqual(e, eu))
|
||||
|
|
|
@@ -973,4 +973,16 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
|
|||
checkAnswer(sql("SELECT a.b + 1 FROM data GROUP BY a.b + 1"), 2)
|
||||
dropTempTable("data")
|
||||
}
|
||||
|
||||
test("Supporting relational operator '<=>' in Spark SQL") {
  // Two structurally identical tables, each holding one row whose value is
  // null. Joining them on the null-safe equality operator '<=>' must match
  // both rows — including the (null, null) pair that a plain '=' would drop —
  // so the query is expected to return keys 1 and 2.
  val leftRows = Seq(TestData(1, "1"), TestData(2, null))
  sparkContext.parallelize(leftRows).registerTempTable("nulldata1")

  val rightRows = Seq(TestData(1, "1"), TestData(2, null))
  sparkContext.parallelize(rightRows).registerTempTable("nulldata2")

  checkAnswer(
    sql("SELECT nulldata1.key FROM nulldata1 join " +
      "nulldata2 on nulldata1.value <=> nulldata2.value"),
    (1 to 2).map(i => Seq(i)))
}
|
||||
}
|
||||
|
|
Loading…
Reference in a new issue