diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
index dff8f6f4d7..fcd533dc5f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
@@ -554,6 +554,16 @@ case class InSet(child: Expression, hset: Set[Any]) extends UnaryExpression with
   }
 
   @transient private[this] lazy val hasNull: Boolean = hset.contains(null)
+  @transient private[this] lazy val isNaN: Any => Boolean = child.dataType match {
+    case DoubleType => (value: Any) => java.lang.Double.isNaN(value.asInstanceOf[java.lang.Double])
+    case FloatType => (value: Any) => java.lang.Float.isNaN(value.asInstanceOf[java.lang.Float])
+    case _ => (_: Any) => false
+  }
+  @transient private[this] lazy val hasNaN = child.dataType match {
+    case DoubleType | FloatType => set.exists(isNaN)
+    case _ => false
+  }
+
 
   override def nullable: Boolean = child.nullable || hasNull
 
@@ -562,6 +572,8 @@ case class InSet(child: Expression, hset: Set[Any]) extends UnaryExpression with
   protected override def nullSafeEval(value: Any): Any = {
     if (set.contains(value)) {
       true
+    } else if (isNaN(value)) {
+      hasNaN
     } else if (hasNull) {
       null
     } else {
@@ -593,15 +605,33 @@ case class InSet(child: Expression, hset: Set[Any]) extends UnaryExpression with
   private def genCodeWithSet(ctx: CodegenContext, ev: ExprCode): ExprCode = {
     nullSafeCodeGen(ctx, ev, c => {
       val setTerm = ctx.addReferenceObj("set", set)
+
       val setIsNull = if (hasNull) {
         s"${ev.isNull} = !${ev.value};"
       } else {
         ""
       }
-      s"""
-         |${ev.value} = $setTerm.contains($c);
-         |$setIsNull
-       """.stripMargin
+
+      val ret = child.dataType match {
+        case DoubleType => Some((v: Any) => s"java.lang.Double.isNaN($v)")
+        case FloatType => Some((v: Any) => s"java.lang.Float.isNaN($v)")
+        case _ => None
+      }
+
+      ret.map { isNaN =>
+        s"""
+           |if ($setTerm.contains($c)) {
+           |  ${ev.value} = true;
+           |} else if (${isNaN(c)}) {
+           |  ${ev.value} = $hasNaN;
+           |}
+           |$setIsNull
+           |""".stripMargin
+      }.getOrElse(
+        s"""
+           |${ev.value} = $setTerm.contains($c);
+           |$setIsNull
+       """.stripMargin)
     })
   }
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala
index 6f75623dc5..c34b37d7c6 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala
@@ -644,4 +644,18 @@ class PredicateSuite extends SparkFunSuite with ExpressionEvalHelper {
     checkExpr(GreaterThan, Double.NaN, Double.NaN, false)
     checkExpr(GreaterThan, 0.0, -0.0, false)
   }
+
+  test("SPARK-36792: InSet should handle Double.NaN and Float.NaN") {
+    checkInAndInSet(In(Literal(Double.NaN), Seq(Literal(Double.NaN), Literal(2d))), true)
+    checkInAndInSet(In(Literal.create(null, DoubleType),
+      Seq(Literal(Double.NaN), Literal(2d), Literal.create(null, DoubleType))), null)
+    checkInAndInSet(In(Literal.create(null, DoubleType),
+      Seq(Literal(Double.NaN), Literal(2d))), null)
+    checkInAndInSet(In(Literal(3d),
+      Seq(Literal(Double.NaN), Literal(2d))), false)
+    checkInAndInSet(In(Literal(3d),
+      Seq(Literal(Double.NaN), Literal(2d), Literal.create(null, DoubleType))), null)
+    checkInAndInSet(In(Literal(Double.NaN),
+      Seq(Literal(Double.NaN), Literal(2d), Literal.create(null, DoubleType))), true)
+  }
 }
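
Note (not part of the patch): a minimal sketch of the semantics the new PredicateSuite cases assert, evaluating the Catalyst In/InSet expressions directly. The object name NaNInSetSketch and the choice of EmptyRow as the input row are illustrative assumptions; expected results mirror the test cases above.

import org.apache.spark.sql.catalyst.expressions.{EmptyRow, In, InSet, Literal}
import org.apache.spark.sql.types.DoubleType

object NaNInSetSketch {
  def main(args: Array[String]): Unit = {
    // NaN IN (NaN, 2d) evaluates to true for the list-based form and, with
    // this change, for the set-based form as well (set.contains misses NaN,
    // so the new isNaN/hasNaN fallback supplies the match).
    println(In(Literal(Double.NaN), Seq(Literal(Double.NaN), Literal(2d))).eval(EmptyRow))  // true
    println(InSet(Literal(Double.NaN), Set[Any](Double.NaN, 2d)).eval(EmptyRow))            // true

    // A non-matching value against a list that also contains NULL yields NULL,
    // as in the corresponding test case above.
    println(In(Literal(3d),
      Seq(Literal(Double.NaN), Literal(2d), Literal.create(null, DoubleType))).eval(EmptyRow))  // null
  }
}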