[SPARK-15011][SQL] Re-enable 'analyze MetastoreRelations' in hive StatisticsSuite

## What changes were proposed in this pull request?
This patch re-enables the `analyze MetastoreRelations` test in `org.apache.spark.sql.hive.StatisticsSuite`.

The flakiness of this test was traced back to a shared configuration option, `hive.exec.compress.output`, in `TestHive`. This property was set to `true` by the `HiveCompatibilitySuite`. I have added configuration resetting logic to `HiveComparisonTest`, in order to prevent such a thing from happening again.

## How was this patch tested?
This change is itself a test; no additional tests are needed.

Author: Herman van Hovell <hvanhovell@databricks.com>
Author: Herman van Hovell <hvanhovell@questtec.nl>

Closes #13498 from hvanhovell/SPARK-15011.
This commit is contained in:
Herman van Hovell 2016-06-14 18:24:59 -07:00 committed by Reynold Xin
parent 214adb14b8
commit 0bd86c0fe4
2 changed files with 10 additions and 5 deletions

View file

@@ -21,7 +21,7 @@ import java.io.{File, PrintWriter}
import scala.reflect.ClassTag
import org.apache.spark.sql.{QueryTest, Row}
import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.execution.command.AnalyzeTableCommand
import org.apache.spark.sql.execution.joins._
@@ -115,7 +115,7 @@ class StatisticsSuite extends QueryTest with TestHiveSingleton with SQLTestUtils
}
}
ignore("analyze MetastoreRelations") {
test("analyze MetastoreRelations") {
def queryTotalSize(tableName: String): BigInt =
spark.sessionState.catalog.lookupRelation(TableIdentifier(tableName)).statistics.sizeInBytes
@@ -155,14 +155,13 @@ class StatisticsSuite extends QueryTest with TestHiveSingleton with SQLTestUtils
sql("ANALYZE TABLE analyzeTable_part COMPUTE STATISTICS noscan")
// This seems to be flaky.
// assert(queryTotalSize("analyzeTable_part") === BigInt(17436))
assert(queryTotalSize("analyzeTable_part") === BigInt(17436))
sql("DROP TABLE analyzeTable_part").collect()
// Try to analyze a temp table
sql("""SELECT * FROM src""").createOrReplaceTempView("tempTable")
intercept[UnsupportedOperationException] {
intercept[AnalysisException] {
sql("ANALYZE TABLE tempTable COMPUTE STATISTICS")
}
spark.sessionState.catalog.dropTable(

View file

@@ -19,6 +19,7 @@ package org.apache.spark.sql.hive.execution
import java.io._
import java.nio.charset.StandardCharsets
import java.util
import scala.util.control.NonFatal
@@ -497,6 +498,8 @@ abstract class HiveComparisonTest
}
}
val savedSettings = new util.HashMap[String, String]
savedSettings.putAll(TestHive.conf.settings)
try {
try {
if (tryWithoutResettingFirst && canSpeculativelyTryWithoutReset) {
@@ -515,6 +518,9 @@ abstract class HiveComparisonTest
}
} catch {
case tf: org.scalatest.exceptions.TestFailedException => throw tf
} finally {
TestHive.conf.settings.clear()
TestHive.conf.settings.putAll(savedSettings)
}
}
}