From fea9360ae70a6715655e6673e13f32959e02368b Mon Sep 17 00:00:00 2001
From: Wenchen Fan
Date: Tue, 1 Sep 2020 12:54:40 +0000
Subject: [PATCH] [SPARK-32757][SQL][FOLLOW-UP] Use child's output for
 canonicalization in SubqueryBroadcastExec

### What changes were proposed in this pull request?

This is a follow-up of https://github.com/apache/spark/pull/29601 that fixes a small mistake in `SubqueryBroadcastExec`: `SubqueryBroadcastExec.doCanonicalize` should canonicalize the build keys with the child plan's output, not `SubqueryBroadcastExec.output`.

### Why are the changes needed?

To fix the mistake described above: the build keys are expressions over the child plan's output, so they must be normalized against `child.output` to produce a stable canonical form.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Existing tests.

Closes #29610 from cloud-fan/follow.

Authored-by: Wenchen Fan
Signed-off-by: Wenchen Fan
---
 .../org/apache/spark/sql/execution/SubqueryBroadcastExec.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SubqueryBroadcastExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SubqueryBroadcastExec.scala
index 9735ca4a03..ddf0b72dd7 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SubqueryBroadcastExec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SubqueryBroadcastExec.scala
@@ -55,7 +55,7 @@ case class SubqueryBroadcastExec(
     "collectTime" -> SQLMetrics.createMetric(sparkContext, "time to collect (ms)"))
 
   override def doCanonicalize(): SparkPlan = {
-    val keys = buildKeys.map(k => QueryPlan.normalizeExpressions(k, output))
+    val keys = buildKeys.map(k => QueryPlan.normalizeExpressions(k, child.output))
     SubqueryBroadcastExec("dpp", index, keys, child.canonicalized)
   }
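
For context, here is a minimal standalone sketch (not part of the patch) of why the attribute list passed to `QueryPlan.normalizeExpressions` matters. The `NormalizeDemo` object and its attributes are made up for illustration, and the sketch assumes `spark-catalyst` is on the classpath:

```scala
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.types.IntegerType

// Hypothetical demo object, not part of Spark or this patch.
object NormalizeDemo extends App {
  // An attribute as it appears in the child plan's output; its exprId is
  // assigned at runtime and differs from run to run.
  val childAttr = AttributeReference("id", IntegerType)()
  val childOutput: Seq[Attribute] = Seq(childAttr)

  // A build key that references the child's attribute.
  val buildKey: Expression = childAttr

  // A freshly created attribute standing in for SubqueryBroadcastExec.output,
  // whose exprId is unrelated to the build key's exprId.
  val unrelatedOutput: Seq[Attribute] = Seq(AttributeReference("id", IntegerType)())

  // Normalizing against the child's output rewrites the exprId to a stable,
  // ordinal-based id, so equivalent plans canonicalize to the same expression.
  println(QueryPlan.normalizeExpressions(buildKey, childOutput))

  // Normalizing against an unrelated attribute list cannot resolve the exprId,
  // so the run-dependent id survives canonicalization.
  println(QueryPlan.normalizeExpressions(buildKey, unrelatedOutput))
}
```

In other words, normalizing the build keys against `child.output` gives two otherwise identical `SubqueryBroadcastExec` nodes the same canonical form, which is what plan equality and reuse checks rely on.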