Revert "[HOTFIX] Synchronize on SQLContext.settings in tests."

This reverts commit d4c30cd991.
This commit is contained in:
Patrick Wendell 2014-07-09 19:36:38 -07:00
parent 2e0a037dff
commit dd22bc2d57
4 changed files with 78 additions and 86 deletions

View file

@@ -52,7 +52,7 @@ trait SQLConf {
/** ********************** SQLConf functionality methods ************ */
@transient
protected[sql] val settings = java.util.Collections.synchronizedMap(
private val settings = java.util.Collections.synchronizedMap(
new java.util.HashMap[String, String]())
def set(props: Properties): Unit = {

View file

@@ -39,27 +39,25 @@ class JoinSuite extends QueryTest {
test("plans broadcast hash join, given hints") {
def mkTest(buildSide: BuildSide, leftTable: String, rightTable: String) = {
TestSQLContext.settings.synchronized {
TestSQLContext.set("spark.sql.join.broadcastTables",
s"${if (buildSide == BuildRight) rightTable else leftTable}")
val rdd = sql( s"""SELECT * FROM $leftTable JOIN $rightTable ON key = a""")
// Using `sparkPlan` because for relevant patterns in HashJoin to be
// matched, other strategies need to be applied.
val physical = rdd.queryExecution.sparkPlan
val bhj = physical.collect { case j: BroadcastHashJoin if j.buildSide == buildSide => j}
TestSQLContext.set("spark.sql.join.broadcastTables",
s"${if (buildSide == BuildRight) rightTable else leftTable}")
val rdd = sql(s"""SELECT * FROM $leftTable JOIN $rightTable ON key = a""")
// Using `sparkPlan` because for relevant patterns in HashJoin to be
// matched, other strategies need to be applied.
val physical = rdd.queryExecution.sparkPlan
val bhj = physical.collect { case j: BroadcastHashJoin if j.buildSide == buildSide => j }
assert(bhj.size === 1, "planner does not pick up hint to generate broadcast hash join")
checkAnswer(
rdd,
Seq(
(1, "1", 1, 1),
(1, "1", 1, 2),
(2, "2", 2, 1),
(2, "2", 2, 2),
(3, "3", 3, 1),
(3, "3", 3, 2)
))
}
assert(bhj.size === 1, "planner does not pick up hint to generate broadcast hash join")
checkAnswer(
rdd,
Seq(
(1, "1", 1, 1),
(1, "1", 1, 2),
(2, "2", 2, 1),
(2, "2", 2, 2),
(3, "3", 3, 1),
(3, "3", 3, 2)
))
}
mkTest(BuildRight, "testData", "testData2")

View file

@@ -28,50 +28,46 @@ class SQLConfSuite extends QueryTest {
val testVal = "test.val.0"
test("programmatic ways of basic setting and getting") {
TestSQLContext.settings.synchronized {
clear()
assert(getOption(testKey).isEmpty)
assert(getAll.toSet === Set())
clear()
assert(getOption(testKey).isEmpty)
assert(getAll.toSet === Set())
set(testKey, testVal)
assert(get(testKey) == testVal)
assert(get(testKey, testVal + "_") == testVal)
assert(getOption(testKey) == Some(testVal))
assert(contains(testKey))
set(testKey, testVal)
assert(get(testKey) == testVal)
assert(get(testKey, testVal + "_") == testVal)
assert(getOption(testKey) == Some(testVal))
assert(contains(testKey))
// Tests SQLConf as accessed from a SQLContext is mutable after
// the latter is initialized, unlike SparkConf inside a SparkContext.
assert(TestSQLContext.get(testKey) == testVal)
assert(TestSQLContext.get(testKey, testVal + "_") == testVal)
assert(TestSQLContext.getOption(testKey) == Some(testVal))
assert(TestSQLContext.contains(testKey))
// Tests SQLConf as accessed from a SQLContext is mutable after
// the latter is initialized, unlike SparkConf inside a SparkContext.
assert(TestSQLContext.get(testKey) == testVal)
assert(TestSQLContext.get(testKey, testVal + "_") == testVal)
assert(TestSQLContext.getOption(testKey) == Some(testVal))
assert(TestSQLContext.contains(testKey))
clear()
}
clear()
}
test("parse SQL set commands") {
TestSQLContext.settings.synchronized {
clear()
sql(s"set $testKey=$testVal")
assert(get(testKey, testVal + "_") == testVal)
assert(TestSQLContext.get(testKey, testVal + "_") == testVal)
clear()
sql(s"set $testKey=$testVal")
assert(get(testKey, testVal + "_") == testVal)
assert(TestSQLContext.get(testKey, testVal + "_") == testVal)
sql("set mapred.reduce.tasks=20")
assert(get("mapred.reduce.tasks", "0") == "20")
sql("set mapred.reduce.tasks = 40")
assert(get("mapred.reduce.tasks", "0") == "40")
sql("set mapred.reduce.tasks=20")
assert(get("mapred.reduce.tasks", "0") == "20")
sql("set mapred.reduce.tasks = 40")
assert(get("mapred.reduce.tasks", "0") == "40")
val key = "spark.sql.key"
val vs = "val0,val_1,val2.3,my_table"
sql(s"set $key=$vs")
assert(get(key, "0") == vs)
val key = "spark.sql.key"
val vs = "val0,val_1,val2.3,my_table"
sql(s"set $key=$vs")
assert(get(key, "0") == vs)
sql(s"set $key=")
assert(get(key, "0") == "")
sql(s"set $key=")
assert(get(key, "0") == "")
clear()
}
clear()
}
}

View file

@@ -397,40 +397,38 @@ class SQLQuerySuite extends QueryTest {
}
test("SET commands semantics using sql()") {
TestSQLContext.settings.synchronized {
clear()
val testKey = "test.key.0"
val testVal = "test.val.0"
val nonexistentKey = "nonexistent"
clear()
val testKey = "test.key.0"
val testVal = "test.val.0"
val nonexistentKey = "nonexistent"
// "set" itself returns all config variables currently specified in SQLConf.
assert(sql("SET").collect().size == 0)
// "set" itself returns all config variables currently specified in SQLConf.
assert(sql("SET").collect().size == 0)
// "set key=val"
sql(s"SET $testKey=$testVal")
checkAnswer(
sql("SET"),
Seq(Seq(testKey, testVal))
)
// "set key=val"
sql(s"SET $testKey=$testVal")
checkAnswer(
sql("SET"),
Seq(Seq(testKey, testVal))
)
sql(s"SET ${testKey + testKey}=${testVal + testVal}")
checkAnswer(
sql("set"),
Seq(
Seq(testKey, testVal),
Seq(testKey + testKey, testVal + testVal))
)
sql(s"SET ${testKey + testKey}=${testVal + testVal}")
checkAnswer(
sql("set"),
Seq(
Seq(testKey, testVal),
Seq(testKey + testKey, testVal + testVal))
)
// "set key"
checkAnswer(
sql(s"SET $testKey"),
Seq(Seq(testKey, testVal))
)
checkAnswer(
sql(s"SET $nonexistentKey"),
Seq(Seq(nonexistentKey, "<undefined>"))
)
clear()
}
// "set key"
checkAnswer(
sql(s"SET $testKey"),
Seq(Seq(testKey, testVal))
)
checkAnswer(
sql(s"SET $nonexistentKey"),
Seq(Seq(nonexistentKey, "<undefined>"))
)
clear()
}
}