Compare commits
5 Commits
861df43e8d
...
497d17f38a
Author | SHA1 | Date
---|---|---
Bruce Robbins | 497d17f38a |
Sean Owen | 07edae9734 |
wangguangxin.cn | 380177d0f1 |
Bruce Robbins | f23a5441d4 |
sychen | 333df85894 |
@ -85,7 +85,7 @@ function loadMore() {
|
|||
if (retStartByte == 0) {
|
||||
disableMoreButton();
|
||||
}
|
||||
$("pre", ".log-content").prepend(cleanData);
|
||||
$("pre", ".log-content").prepend(document.createTextNode(cleanData));
|
||||
|
||||
curLogLength = curLogLength + (startByte - retStartByte);
|
||||
startByte = retStartByte;
|
||||
|
@ -115,7 +115,7 @@ function loadNew() {
|
|||
var retLogLength = dataInfo[2];
|
||||
|
||||
var cleanData = data.substring(newlineIndex + 1);
|
||||
$("pre", ".log-content").append(cleanData);
|
||||
$("pre", ".log-content").append(document.createTextNode(cleanData));
|
||||
|
||||
curLogLength = curLogLength + (retEndByte - retStartByte);
|
||||
endByte = retEndByte;
|
||||
|
|
|
@ -117,7 +117,7 @@ case class AggregateExpression(
|
|||
// This is a bit of a hack. Really we should not be constructing this container and reasoning
|
||||
// about datatypes / aggregation mode until after we have finished analysis and made it to
|
||||
// planning.
|
||||
UnresolvedAttribute(aggregateFunction.toString)
|
||||
UnresolvedAttribute.quoted(aggregateFunction.toString)
|
||||
}
|
||||
|
||||
def filterAttributes: AttributeSet = filter.map(_.references).getOrElse(AttributeSet.empty)
|
||||
|
|
|
@ -444,20 +444,25 @@ case class Inline(child: Expression) extends UnaryExpression with CollectionGene
|
|||
}
|
||||
|
||||
override def elementSchema: StructType = child.dataType match {
|
||||
case ArrayType(st: StructType, _) => st
|
||||
case ArrayType(st: StructType, false) => st
|
||||
case ArrayType(st: StructType, true) => st.asNullable
|
||||
}
|
||||
|
||||
override def collectionType: DataType = child.dataType
|
||||
|
||||
private lazy val numFields = elementSchema.fields.length
|
||||
|
||||
private lazy val generatorNullRow = new GenericInternalRow(elementSchema.length)
|
||||
|
||||
override def eval(input: InternalRow): TraversableOnce[InternalRow] = {
|
||||
val inputArray = child.eval(input).asInstanceOf[ArrayData]
|
||||
if (inputArray == null) {
|
||||
Nil
|
||||
} else {
|
||||
for (i <- 0 until inputArray.numElements())
|
||||
yield inputArray.getStruct(i, numFields)
|
||||
for (i <- 0 until inputArray.numElements()) yield {
|
||||
val s = inputArray.getStruct(i, numFields)
|
||||
if (s == null) generatorNullRow else s
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -206,7 +206,7 @@ case class Alias(child: Expression, name: String)(
|
|||
if (resolved) {
|
||||
AttributeReference(name, child.dataType, child.nullable, metadata)(exprId, qualifier)
|
||||
} else {
|
||||
UnresolvedAttribute(name)
|
||||
UnresolvedAttribute.quoted(name)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -359,7 +359,17 @@ object UnwrapCastInBinaryComparison extends Rule[LogicalPlan] {
|
|||
!fromExp.foldable &&
|
||||
fromExp.dataType.isInstanceOf[NumericType] &&
|
||||
toType.isInstanceOf[NumericType] &&
|
||||
Cast.canUpCast(fromExp.dataType, toType)
|
||||
canUnwrapCast(fromExp.dataType, toType)
|
||||
}
|
||||
|
||||
private def canUnwrapCast(from: DataType, to: DataType): Boolean = (from, to) match {
|
||||
// SPARK-39476: It's not safe to unwrap cast from Integer to Float or from Long to Float/Double,
|
||||
// since the length of Integer/Long may exceed the significant digits of Float/Double.
|
||||
case (IntegerType, FloatType) => false
|
||||
case (LongType, FloatType) => false
|
||||
case (LongType, DoubleType) => false
|
||||
case _ if from.isInstanceOf[NumericType] => Cast.canUpCast(from, to)
|
||||
case _ => false
|
||||
}
|
||||
|
||||
private[optimizer] def getRange(dt: DataType): Option[(Any, Any)] = dt match {
|
||||
|
|
|
@ -23,6 +23,7 @@ import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCo
|
|||
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
|
||||
import org.apache.spark.sql.catalyst.trees.LeafLike
|
||||
import org.apache.spark.sql.functions._
|
||||
import org.apache.spark.sql.internal.SQLConf
|
||||
import org.apache.spark.sql.test.SharedSparkSession
|
||||
import org.apache.spark.sql.types.{IntegerType, StructType}
|
||||
|
||||
|
@ -364,6 +365,41 @@ class GeneratorFunctionSuite extends QueryTest with SharedSparkSession {
|
|||
df.select(Stream(explode(array(min($"v"), max($"v"))), sum($"v")): _*),
|
||||
Row(1, 6) :: Row(3, 6) :: Nil)
|
||||
}
|
||||
|
||||
def testNullStruct(): Unit = {
|
||||
val df = sql(
|
||||
"""select * from values
|
||||
|(
|
||||
| 1,
|
||||
| array(
|
||||
| named_struct('c1', 0, 'c2', 1),
|
||||
| null,
|
||||
| named_struct('c1', 2, 'c2', 3),
|
||||
| null
|
||||
| )
|
||||
|)
|
||||
|as tbl(a, b)
|
||||
""".stripMargin)
|
||||
df.createOrReplaceTempView("t1")
|
||||
|
||||
checkAnswer(
|
||||
sql("select inline(b) from t1"),
|
||||
Row(0, 1) :: Row(null, null) :: Row(2, 3) :: Row(null, null) :: Nil)
|
||||
|
||||
checkAnswer(
|
||||
sql("select a, inline(b) from t1"),
|
||||
Row(1, 0, 1) :: Row(1, null, null) :: Row(1, 2, 3) :: Row(1, null, null) :: Nil)
|
||||
}
|
||||
|
||||
test("SPARK-39061: inline should handle null struct") {
|
||||
testNullStruct
|
||||
}
|
||||
|
||||
test("SPARK-39496: inline eval path should handle null struct") {
|
||||
withSQLConf(SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> "false") {
|
||||
testNullStruct
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
case class EmptyGenerator() extends Generator with LeafLike[Expression] {
|
||||
|
|
|
@ -1921,4 +1921,32 @@ class SubquerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
|
|||
}.getMessage.contains("Correlated column is not allowed in predicate"))
|
||||
}
|
||||
}
|
||||
|
||||
test("SPARK-39355: Single column uses quoted to construct UnresolvedAttribute") {
|
||||
checkAnswer(
|
||||
sql("""
|
||||
|SELECT *
|
||||
|FROM (
|
||||
| SELECT '2022-06-01' AS c1
|
||||
|) a
|
||||
|WHERE c1 IN (
|
||||
| SELECT date_add('2022-06-01', 0)
|
||||
|)
|
||||
|""".stripMargin),
|
||||
Row("2022-06-01"))
|
||||
checkAnswer(
|
||||
sql("""
|
||||
|SELECT *
|
||||
|FROM (
|
||||
| SELECT '2022-06-01' AS c1
|
||||
|) a
|
||||
|WHERE c1 IN (
|
||||
| SELECT date_add(a.c1.k1, 0)
|
||||
| FROM (
|
||||
| SELECT named_struct('k1', '2022-06-01') AS c1
|
||||
| ) a
|
||||
|)
|
||||
|""".stripMargin),
|
||||
Row("2022-06-01"))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -190,5 +190,36 @@ class UnwrapCastInComparisonEndToEndSuite extends QueryTest with SharedSparkSess
|
|||
}
|
||||
}
|
||||
|
||||
test("SPARK-39476: Should not unwrap cast from Long to Double/Float") {
|
||||
withTable(t) {
|
||||
Seq((6470759586864300301L))
|
||||
.toDF("c1").write.saveAsTable(t)
|
||||
val df = spark.table(t)
|
||||
|
||||
checkAnswer(
|
||||
df.where("cast(c1 as double) == cast(6470759586864300301L as double)")
|
||||
.select("c1"),
|
||||
Row(6470759586864300301L))
|
||||
|
||||
checkAnswer(
|
||||
df.where("cast(c1 as float) == cast(6470759586864300301L as float)")
|
||||
.select("c1"),
|
||||
Row(6470759586864300301L))
|
||||
}
|
||||
}
|
||||
|
||||
test("SPARK-39476: Should not unwrap cast from Integer to Float") {
|
||||
withTable(t) {
|
||||
Seq((33554435))
|
||||
.toDF("c1").write.saveAsTable(t)
|
||||
val df = spark.table(t)
|
||||
|
||||
checkAnswer(
|
||||
df.where("cast(c1 as float) == cast(33554435 as float)")
|
||||
.select("c1"),
|
||||
Row(33554435))
|
||||
}
|
||||
}
|
||||
|
||||
private def decimal(v: BigDecimal): Decimal = Decimal(v, 5, 2)
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue