[SPARK-11672][ML] flaky spark.ml read/write tests

We set `sqlContext = null` in `afterAll`, but that does not clear `SQLContext.activeContext`, so `SQLContext.getOrCreate` may pick up the `SparkContext` from a previous test suite and fail. This PR calls `clearActive` in both `beforeAll` and `afterAll` so that a stale context from another test suite is never reused.
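For illustration, here is a minimal sketch of the failure mode. The two-suite setup is hypothetical; in real runs Spark marks a context active internally during query execution, which the sketch simulates with an explicit `setActive` call:

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

// Suite 1 runs and tears down, but never clears the active SQLContext.
val sc1 = new SparkContext(new SparkConf().setMaster("local[2]").setAppName("suite1"))
val sql1 = new SQLContext(sc1)
SQLContext.setActive(sql1) // Spark does this internally while executing queries
sc1.stop()                 // suite 1's afterAll: sql1 is now backed by a stopped context

// Suite 2 starts with a fresh SparkContext, but getOrCreate can still
// hand back the stale sql1 because the active context was never cleared.
val sc2 = new SparkContext(new SparkConf().setMaster("local[2]").setAppName("suite2"))
val sql2 = SQLContext.getOrCreate(sc2) // may be sql1, whose SparkContext is stopped

// The fix in this patch: clear the thread-local active context between suites.
SQLContext.clearActive()
```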

cc: yhuai

Author: Xiangrui Meng <meng@databricks.com>

Closes #9677 from mengxr/SPARK-11672.2.
Date: 2015-11-12 20:01:13 -08:00
Parent: e4e46b20f6
Commit: e71c07557c
5 changed files with 7 additions and 5 deletions

JavaDefaultReadWriteSuite.java

@@ -23,7 +23,7 @@ import java.io.IOException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.Ignore;
+import org.junit.Test;
 
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.SQLContext;
@@ -50,7 +50,7 @@ public class JavaDefaultReadWriteSuite {
     Utils.deleteRecursively(tempDir);
   }
 
-  @Ignore // SPARK-11672
+  @Test
   public void testDefaultReadWrite() throws IOException {
     String uid = "my_params";
     MyParams instance = new MyParams(uid);

LogisticRegressionSuite.scala

@@ -872,7 +872,7 @@ class LogisticRegressionSuite
     assert(model1a0.intercept ~== model1b.intercept absTol 1E-3)
   }
 
-  ignore("read/write") { // SPARK-11672
+  test("read/write") {
     // Set some Params to make sure set Params are serialized.
     val lr = new LogisticRegression()
       .setElasticNetParam(0.1)

BinarizerSuite.scala

@@ -68,7 +68,7 @@ class BinarizerSuite extends SparkFunSuite with MLlibTestSparkContext with DefaultReadWriteTest {
     }
   }
 
-  ignore("read/write") { // SPARK-11672
+  test("read/write") {
     val binarizer = new Binarizer()
       .setInputCol("feature")
       .setOutputCol("binarized_feature")

DefaultReadWriteTest.scala

@@ -105,7 +105,7 @@ object MyParams extends Readable[MyParams] {
 class DefaultReadWriteSuite extends SparkFunSuite with MLlibTestSparkContext
   with DefaultReadWriteTest {
 
-  ignore("default read/write") { // SPARK-11672
+  test("default read/write") {
     val myParams = new MyParams("my_params")
     testDefaultReadWrite(myParams)
   }

MLlibTestSparkContext.scala

@@ -32,11 +32,13 @@ trait MLlibTestSparkContext extends BeforeAndAfterAll { self: Suite =>
       .setMaster("local[2]")
       .setAppName("MLlibUnitTest")
     sc = new SparkContext(conf)
+    SQLContext.clearActive()
     sqlContext = new SQLContext(sc)
   }
 
   override def afterAll() {
     sqlContext = null
+    SQLContext.clearActive()
     if (sc != null) {
       sc.stop()
     }
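For reference, a suite mixing in the fixed trait might look like the following sketch. The suite name and assertion are illustrative only, not part of the patch:

```scala
class MyReadWriteSuite extends SparkFunSuite with MLlibTestSparkContext {
  test("runs against a fresh SQLContext") {
    // sqlContext cannot be a stale context left over from a previously
    // finished suite, because beforeAll calls SQLContext.clearActive().
    val df = sqlContext.createDataFrame(Seq((0, 1.0), (1, 0.0))).toDF("id", "label")
    assert(df.count() === 2)
  }
}
```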