[SQL] Make it possible to create Java/Python SQLContexts from an existing Scala SQLContext.

Author: Michael Armbrust <michael@databricks.com>

Closes #761 from marmbrus/existingContext and squashes the following commits:

4651051 [Michael Armbrust] Make it possible to create Java/Python SQLContexts from an existing Scala SQLContext.
Authored by Michael Armbrust on 2014-05-13 21:23:51 -07:00; committed by Reynold Xin
parent 753b04dea4
commit 44233865cf
2 changed files with 7 additions and 4 deletions

python/pyspark/sql.py

@@ -28,7 +28,7 @@ class SQLContext:
     register L{SchemaRDD}s as tables, execute sql over tables, cache tables, and read parquet files.
     """
 
-    def __init__(self, sparkContext):
+    def __init__(self, sparkContext, sqlContext = None):
         """
         Create a new SQLContext.
 
@@ -58,10 +58,13 @@ class SQLContext:
         self._jvm = self._sc._jvm
         self._pythonToJavaMap = self._jvm.PythonRDD.pythonToJavaMap
 
+        if sqlContext:
+            self._scala_SQLContext = sqlContext
+
     @property
     def _ssql_ctx(self):
         """
-        Accessor for the JVM SparkSQL context. Subclasses can overrite this property to provide
+        Accessor for the JVM SparkSQL context. Subclasses can override this property to provide
         their own JVM Contexts.
         """
         if not hasattr(self, '_scala_SQLContext'):
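
Taken together, the Python changes let a caller hand an existing JVM SQLContext (a py4j handle) to the Python wrapper, instead of having _ssql_ctx lazily create a fresh one. A minimal sketch, assuming Spark 1.0-era pyspark and a local master; the existing_ctx handle built through the py4j gateway here is illustrative:

from pyspark import SparkContext
from pyspark.sql import SQLContext

sc = SparkContext("local", "existing-context-demo")

# Construct a Scala SQLContext on the JVM side via the py4j gateway.
# sc._jsc is the JavaSparkContext; .sc() unwraps the Scala SparkContext.
existing_ctx = sc._jvm.org.apache.spark.sql.SQLContext(sc._jsc.sc())

# New with this patch: reuse that context rather than creating another one.
sqlCtx = SQLContext(sc, existing_ctx)
assert sqlCtx._ssql_ctx is existing_ctx  # _ssql_ctx returns the handle we passed in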

sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala

@@ -33,9 +33,9 @@ import org.apache.spark.util.Utils
 /**
  * The entry point for executing Spark SQL queries from a Java program.
  */
-class JavaSQLContext(sparkContext: JavaSparkContext) {
+class JavaSQLContext(val sqlContext: SQLContext) {
 
-  val sqlContext = new SQLContext(sparkContext.sc)
+  def this(sparkContext: JavaSparkContext) = this(new SQLContext(sparkContext.sc))
 
   /**
    * Executes a query expressed in SQL, returning the result as a JavaSchemaRDD
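
Since the primary constructor now takes a SQLContext directly, Java-facing code can wrap a context that Scala code already created, while the new auxiliary constructor keeps the old JavaSparkContext entry point working. A minimal sketch of the Scala side, assuming a local master (class and package names as in this commit):

import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.api.java.JavaSQLContext

object ExistingContextDemo {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local", "existing-context-demo")

    // A Scala SQLContext created by existing application code.
    val scalaCtx = new SQLContext(sc)

    // New with this patch: wrap it for Java callers instead of letting
    // JavaSQLContext construct a second, independent context.
    val javaCtx = new JavaSQLContext(scalaCtx)
    assert(javaCtx.sqlContext eq scalaCtx)  // same underlying context

    sc.stop()
  }
}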