[SPARK-13187][SQL] Add boolean/long/double options in DataFrameReader/Writer

This patch adds `option` overloads for boolean, long, and double types. This makes it slightly easier for Spark users to specify options without first turning them into strings. Using the JSON data source as an example:

Before this patch:
```scala
sqlContext.read.option("primitivesAsString", "true").json("/path/to/json")
```

After this patch:
```scala
sqlContext.read.option("primitivesAsString", true).json("/path/to/json")
```
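
The same call style works for the long and double overloads as well. A hedged sketch (only `primitivesAsString` is an option the JSON source actually documents; the other names are hypothetical, shown just to exercise the new overloads):
```scala
// Illustrative only: "sampleSize" and "threshold" are hypothetical option names,
// used here purely to demonstrate the Long and Double overloads added by this patch.
sqlContext.read
  .option("primitivesAsString", true)   // Boolean overload
  .option("sampleSize", 1000L)          // Long overload
  .option("threshold", 0.75)            // Double overload
  .json("/path/to/json")
```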

Author: Reynold Xin <rxin@databricks.com>

Closes #11072 from rxin/SPARK-13187.
Commit 82d84ff2dd (parent 352102ed0b)
Authored by Reynold Xin on 2016-02-04 22:43:44 -08:00
3 changed files with 67 additions and 0 deletions


```diff
@@ -78,6 +78,27 @@ class DataFrameReader private[sql](sqlContext: SQLContext) extends Logging {
    this
  }

  /**
   * Adds an input option for the underlying data source.
   *
   * @since 2.0.0
   */
  def option(key: String, value: Boolean): DataFrameReader = option(key, value.toString)

  /**
   * Adds an input option for the underlying data source.
   *
   * @since 2.0.0
   */
  def option(key: String, value: Long): DataFrameReader = option(key, value.toString)

  /**
   * Adds an input option for the underlying data source.
   *
   * @since 2.0.0
   */
  def option(key: String, value: Double): DataFrameReader = option(key, value.toString)

  /**
   * (Scala-specific) Adds input options for the underlying data source.
   *
```
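
Each new overload simply delegates to the existing `String` overload via `toString`, so a typed call and its string equivalent record the same option. A minimal sketch of that equivalence, assuming the JSON source's `samplingRatio` option:
```scala
// Both readers end up carrying the same option entry ("samplingRatio" -> "0.3"),
// because the Double overload calls value.toString before storing it.
val typed   = sqlContext.read.option("samplingRatio", 0.3)    // Double overload
val stringy = sqlContext.read.option("samplingRatio", "0.3")  // String overload
```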


```diff
@@ -95,6 +95,27 @@ final class DataFrameWriter private[sql](df: DataFrame) {
    this
  }

  /**
   * Adds an output option for the underlying data source.
   *
   * @since 2.0.0
   */
  def option(key: String, value: Boolean): DataFrameWriter = option(key, value.toString)

  /**
   * Adds an output option for the underlying data source.
   *
   * @since 2.0.0
   */
  def option(key: String, value: Long): DataFrameWriter = option(key, value.toString)

  /**
   * Adds an output option for the underlying data source.
   *
   * @since 2.0.0
   */
  def option(key: String, value: Double): DataFrameWriter = option(key, value.toString)

  /**
   * (Scala-specific) Adds output options for the underlying data source.
   *
```
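
For symmetry, the writer gains the same three overloads. A hedged usage sketch (the option names below are placeholders, not options any particular sink is guaranteed to understand):
```scala
// Placeholder option names, used only to exercise the new Boolean/Long/Double overloads;
// `df` is assumed to be an existing DataFrame.
df.write
  .format("json")
  .option("overwriteFlag", true)     // forwarded to the sink as "true"
  .option("maxRecords", 10000L)      // forwarded as "10000"
  .option("compressionLevel", 0.9)   // forwarded as "0.9"
  .save("/path/to/output")
```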


```diff
@@ -162,4 +162,29 @@ class DataStreamReaderWriterSuite extends StreamTest with SharedSQLContext {
    assert(LastOptions.parameters("path") == "/test")
  }

  test("test different data types for options") {
    val df = sqlContext.read
      .format("org.apache.spark.sql.streaming.test")
      .option("intOpt", 56)
      .option("boolOpt", false)
      .option("doubleOpt", 6.7)
      .stream("/test")

    assert(LastOptions.parameters("intOpt") == "56")
    assert(LastOptions.parameters("boolOpt") == "false")
    assert(LastOptions.parameters("doubleOpt") == "6.7")

    LastOptions.parameters = null
    df.write
      .format("org.apache.spark.sql.streaming.test")
      .option("intOpt", 56)
      .option("boolOpt", false)
      .option("doubleOpt", 6.7)
      .stream("/test")
      .stop()

    assert(LastOptions.parameters("intOpt") == "56")
    assert(LastOptions.parameters("boolOpt") == "false")
    assert(LastOptions.parameters("doubleOpt") == "6.7")
  }
}
```
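
As the test shows, data sources still receive every option as a string (`LastOptions.parameters` holds plain string values), so a source that wants a typed value parses it back itself. A minimal sketch under that assumption, reusing the test's option names:
```scala
// Hypothetical helper on the data-source side: options always arrive as strings,
// so typed values are recovered with toBoolean / toLong / toDouble.
def readOptions(parameters: Map[String, String]): (Boolean, Long, Double) = (
  parameters.get("boolOpt").map(_.toBoolean).getOrElse(false),
  parameters.get("intOpt").map(_.toLong).getOrElse(0L),
  parameters.get("doubleOpt").map(_.toDouble).getOrElse(0.0)
)
```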