[SPARK-35595][TESTS] Support multiple loggers in testing method withLogAppender

### What changes were proposed in this pull request?

A test case of AdaptiveQueryExecSuite becomes flaky since there are too many debug logs in RootLogger:
https://github.com/Yikun/spark/runs/2715222392?check_suite_focus=true
https://amplab.cs.berkeley.edu/jenkins/job/SparkPullRequestBuilder/139125/testReport/

To fix it, I suggest supporting multiple loggers in the testing method withLogAppender. So that the LogAppender gets clean target log outputs.

### Why are the changes needed?

Fix a flaky test case.
Also, reduce unnecessary memory cost in tests.

### Does this PR introduce _any_ user-facing change?

No

### How was this patch tested?

Unit test

Closes #32725 from gengliangwang/fixFlakyLogAppender.

Authored-by: Gengliang Wang <gengliang@apache.org>
Signed-off-by: Gengliang Wang <gengliang@apache.org>
This commit is contained in:
Gengliang Wang 2021-06-02 10:05:29 +08:00
parent 0ad5ae54b2
commit 9d0d4edb43
3 changed files with 21 additions and 11 deletions

View file

@@ -217,19 +217,27 @@ abstract class SparkFunSuite
*/
protected def withLogAppender(
appender: Appender,
loggerName: Option[String] = None,
loggerNames: Seq[String] = Seq.empty,
level: Option[Level] = None)(
f: => Unit): Unit = {
val logger = loggerName.map(Logger.getLogger).getOrElse(Logger.getRootLogger)
val restoreLevel = logger.getLevel
logger.addAppender(appender)
if (level.isDefined) {
logger.setLevel(level.get)
val loggers = if (loggerNames.nonEmpty) {
loggerNames.map(Logger.getLogger)
} else {
Seq(Logger.getRootLogger)
}
val restoreLevels = loggers.map(_.getLevel)
loggers.foreach { logger =>
logger.addAppender(appender)
if (level.isDefined) {
logger.setLevel(level.get)
}
}
try f finally {
logger.removeAppender(appender)
loggers.foreach(_.removeAppender(appender))
if (level.isDefined) {
logger.setLevel(restoreLevel)
loggers.zipWithIndex.foreach { case (logger, i) =>
logger.setLevel(restoreLevels(i))
}
}
}
}

View file

@@ -516,7 +516,7 @@ class CodeGenerationSuite extends SparkFunSuite with ExpressionEvalHelper {
test("SPARK-25113: should log when there exists generated methods above HugeMethodLimit") {
val appender = new LogAppender("huge method limit")
withLogAppender(appender, loggerName = Some(classOf[CodeGenerator[_, _]].getName)) {
withLogAppender(appender, loggerNames = Seq(classOf[CodeGenerator[_, _]].getName)) {
val x = 42
val expr = HugeCodeIntExpression(x)
val proj = GenerateUnsafeProjection.generate(Seq(expr))

View file

@@ -823,7 +823,7 @@ class AdaptiveQueryExecSuite
val logAppender = new LogAppender("adaptive execution")
withLogAppender(
logAppender,
loggerName = Some(AdaptiveSparkPlanExec.getClass.getName.dropRight(1)),
loggerNames = Seq(AdaptiveSparkPlanExec.getClass.getName.dropRight(1)),
level = Some(Level.TRACE)) {
withSQLConf(
SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "true",
@@ -1613,7 +1613,9 @@ class AdaptiveQueryExecSuite
val testDf = df.groupBy("index")
.agg(sum($"pv").alias("pv"))
.join(dim, Seq("index"))
withLogAppender(testAppender, level = Some(Level.DEBUG)) {
val loggerNames =
Seq(classOf[BroadcastQueryStageExec].getName, classOf[ShuffleQueryStageExec].getName)
withLogAppender(testAppender, loggerNames, level = Some(Level.DEBUG)) {
withSQLConf(SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "true") {
val result = testDf.collect()
assert(result.length == 26)