[SPARK-11199][SPARKR] Improve R context management story and add getOrCreate
* Changes api.r.SQLUtils to use ```SQLContext.getOrCreate``` instead of creating a new context. * Adds a simple test [SPARK-11199] #comment link with JIRA Author: Hossein <hossein@databricks.com> Closes #9185 from falaki/SPARK-11199.
This commit is contained in:
parent
8e629b10cb
commit
f6ecf14333
|
@ -62,6 +62,10 @@ mockLinesComplexType <-
|
|||
complexTypeJsonPath <- tempfile(pattern="sparkr-test", fileext=".tmp")
|
||||
writeLines(mockLinesComplexType, complexTypeJsonPath)
|
||||
|
||||
test_that("calling sparkRSQL.init returns existing SQL context", {
|
||||
expect_equal(sparkRSQL.init(sc), sqlContext)
|
||||
})
|
||||
|
||||
test_that("infer types and check types", {
|
||||
expect_equal(infer_type(1L), "integer")
|
||||
expect_equal(infer_type(1.0), "double")
|
||||
|
|
|
@ -32,7 +32,7 @@ private[r] object SQLUtils {
|
|||
SerDe.registerSqlSerDe((readSqlObject, writeSqlObject))
|
||||
|
||||
def createSQLContext(jsc: JavaSparkContext): SQLContext = {
|
||||
new SQLContext(jsc)
|
||||
SQLContext.getOrCreate(jsc.sc)
|
||||
}
|
||||
|
||||
def getJavaSparkContext(sqlCtx: SQLContext): JavaSparkContext = {
|
||||
|
|
Loading…
Reference in a new issue