[SPARK-11672][ML] flaky spark.ml read/write tests
We set `sqlContext = null` in `afterAll`. However, this doesn't change `SQLContext.activeContext`, so `SQLContext.getOrCreate` might use the `SparkContext` from a previous test suite and hence cause the error. This PR calls `clearActive` in `beforeAll` and `afterAll` to avoid using an old context from other test suites. cc: yhuai Author: Xiangrui Meng <meng@databricks.com> Closes #9677 from mengxr/SPARK-11672.2.
This commit is contained in:
parent
e4e46b20f6
commit
e71c07557c
|
@ -23,7 +23,7 @@ import java.io.IOException;
|
||||||
import org.junit.After;
|
import org.junit.After;
|
||||||
import org.junit.Assert;
|
import org.junit.Assert;
|
||||||
import org.junit.Before;
|
import org.junit.Before;
|
||||||
import org.junit.Ignore;
|
import org.junit.Test;
|
||||||
|
|
||||||
import org.apache.spark.api.java.JavaSparkContext;
|
import org.apache.spark.api.java.JavaSparkContext;
|
||||||
import org.apache.spark.sql.SQLContext;
|
import org.apache.spark.sql.SQLContext;
|
||||||
|
@ -50,7 +50,7 @@ public class JavaDefaultReadWriteSuite {
|
||||||
Utils.deleteRecursively(tempDir);
|
Utils.deleteRecursively(tempDir);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Ignore // SPARK-11672
|
@Test
|
||||||
public void testDefaultReadWrite() throws IOException {
|
public void testDefaultReadWrite() throws IOException {
|
||||||
String uid = "my_params";
|
String uid = "my_params";
|
||||||
MyParams instance = new MyParams(uid);
|
MyParams instance = new MyParams(uid);
|
||||||
|
|
|
@ -872,7 +872,7 @@ class LogisticRegressionSuite
|
||||||
assert(model1a0.intercept ~== model1b.intercept absTol 1E-3)
|
assert(model1a0.intercept ~== model1b.intercept absTol 1E-3)
|
||||||
}
|
}
|
||||||
|
|
||||||
ignore("read/write") { // SPARK-11672
|
test("read/write") {
|
||||||
// Set some Params to make sure set Params are serialized.
|
// Set some Params to make sure set Params are serialized.
|
||||||
val lr = new LogisticRegression()
|
val lr = new LogisticRegression()
|
||||||
.setElasticNetParam(0.1)
|
.setElasticNetParam(0.1)
|
||||||
|
|
|
@ -68,7 +68,7 @@ class BinarizerSuite extends SparkFunSuite with MLlibTestSparkContext with Defau
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
ignore("read/write") { // SPARK-11672
|
test("read/write") {
|
||||||
val binarizer = new Binarizer()
|
val binarizer = new Binarizer()
|
||||||
.setInputCol("feature")
|
.setInputCol("feature")
|
||||||
.setOutputCol("binarized_feature")
|
.setOutputCol("binarized_feature")
|
||||||
|
|
|
@ -105,7 +105,7 @@ object MyParams extends Readable[MyParams] {
|
||||||
class DefaultReadWriteSuite extends SparkFunSuite with MLlibTestSparkContext
|
class DefaultReadWriteSuite extends SparkFunSuite with MLlibTestSparkContext
|
||||||
with DefaultReadWriteTest {
|
with DefaultReadWriteTest {
|
||||||
|
|
||||||
ignore("default read/write") { // SPARK-11672
|
test("default read/write") {
|
||||||
val myParams = new MyParams("my_params")
|
val myParams = new MyParams("my_params")
|
||||||
testDefaultReadWrite(myParams)
|
testDefaultReadWrite(myParams)
|
||||||
}
|
}
|
||||||
|
|
|
@ -32,11 +32,13 @@ trait MLlibTestSparkContext extends BeforeAndAfterAll { self: Suite =>
|
||||||
.setMaster("local[2]")
|
.setMaster("local[2]")
|
||||||
.setAppName("MLlibUnitTest")
|
.setAppName("MLlibUnitTest")
|
||||||
sc = new SparkContext(conf)
|
sc = new SparkContext(conf)
|
||||||
|
SQLContext.clearActive()
|
||||||
sqlContext = new SQLContext(sc)
|
sqlContext = new SQLContext(sc)
|
||||||
}
|
}
|
||||||
|
|
||||||
override def afterAll() {
|
override def afterAll() {
|
||||||
sqlContext = null
|
sqlContext = null
|
||||||
|
SQLContext.clearActive()
|
||||||
if (sc != null) {
|
if (sc != null) {
|
||||||
sc.stop()
|
sc.stop()
|
||||||
}
|
}
|
||||||
|
|
Loading…
Reference in a new issue