Revert "[SPARK-34064][SQL] Cancel the running broadcast sub-jobs when SQL statement is cancelled"
This reverts commit f1b21ba505.
This commit is contained in:
parent
00d43b1f82
commit
6cd0092150
|
@@ -24,7 +24,7 @@ import scala.concurrent.{ExecutionContext, Promise}
|
|||
import scala.concurrent.duration.NANOSECONDS
|
||||
import scala.util.control.NonFatal
|
||||
|
||||
import org.apache.spark.{broadcast, SparkContext, SparkException}
|
||||
import org.apache.spark.{broadcast, SparkException}
|
||||
import org.apache.spark.launcher.SparkLauncher
|
||||
import org.apache.spark.rdd.RDD
|
||||
import org.apache.spark.sql.catalyst.InternalRow
|
||||
|
@@ -74,10 +74,7 @@ case class BroadcastExchangeExec(
|
|||
child: SparkPlan) extends BroadcastExchangeLike {
|
||||
import BroadcastExchangeExec._
|
||||
|
||||
// Cancelling a SQL statement from Spark ThriftServer needs to cancel
|
||||
// its related broadcast sub-jobs. So set the run id to job group id if exists.
|
||||
override val runId: UUID = Option(sparkContext.getLocalProperty(SparkContext.SPARK_JOB_GROUP_ID))
|
||||
.map(UUID.fromString).getOrElse(UUID.randomUUID)
|
||||
override val runId: UUID = UUID.randomUUID
|
||||
|
||||
override lazy val metrics = Map(
|
||||
"dataSize" -> SQLMetrics.createSizeMetric(sparkContext, "data size"),
|
||||
|
|
Loading…
Reference in a new issue