diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SubqueryBroadcastExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SubqueryBroadcastExec.scala
index 9735ca4a03..ddf0b72dd7 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SubqueryBroadcastExec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SubqueryBroadcastExec.scala
@@ -55,7 +55,7 @@ case class SubqueryBroadcastExec(
     "collectTime" -> SQLMetrics.createMetric(sparkContext, "time to collect (ms)"))
 
   override def doCanonicalize(): SparkPlan = {
-    val keys = buildKeys.map(k => QueryPlan.normalizeExpressions(k, output))
+    val keys = buildKeys.map(k => QueryPlan.normalizeExpressions(k, child.output))
     SubqueryBroadcastExec("dpp", index, keys, child.canonicalized)
   }
 