[SPARK-27008][SQL][FOLLOWUP] Fix typo from *_EANBLED to *_ENABLED

## What changes were proposed in this pull request?

This fixes a typo in the name of the SQL config `val`: DATETIME_JAVA8API_**EANBLED** -> DATETIME_JAVA8API_**ENABLED**. Only the Scala identifier changes; the user-facing key, `spark.sql.datetime.java8API.enabled`, was already spelled correctly.

## How was this patch tested?

This was tested by `RowEncoderSuite` and `LiteralExpressionSuite`.
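
A quick way to confirm that only the Scala identifier changed is a minimal sketch for `spark-shell` (not part of the patch; `spark` is the shell's default session, and it assumes `SQLConf.get` resolves to the active session's conf):

```scala
import org.apache.spark.sql.internal.SQLConf

// The renamed constant still carries the unchanged user-facing key string.
assert(SQLConf.DATETIME_JAVA8API_ENABLED.key == "spark.sql.datetime.java8API.enabled")

// Setting the key through the session is observed by the renamed getter.
spark.conf.set(SQLConf.DATETIME_JAVA8API_ENABLED.key, "true")
assert(SQLConf.get.datetimeJava8ApiEnabled)
```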

Closes #24194 from MaxGekk/date-localdate-followup.

Authored-by: Maxim Gekk <max.gekk@gmail.com>
Signed-off-by: Dongjoon Hyun <dhyun@apple.com>
Maxim Gekk authored on 2019-03-24 17:16:33 -07:00; committed by Dongjoon Hyun
parent a6c207c9c0
commit 52671d631d
6 changed files with 15 additions and 15 deletions

@@ -1710,7 +1710,7 @@ object SQLConf {
.booleanConf
.createWithDefault(true)
- val DATETIME_JAVA8API_EANBLED = buildConf("spark.sql.datetime.java8API.enabled")
+ val DATETIME_JAVA8API_ENABLED = buildConf("spark.sql.datetime.java8API.enabled")
.doc("If the configuration property is set to true, java.time.Instant and " +
"java.time.LocalDate classes of Java 8 API are used as external types for " +
"Catalyst's TimestampType and DateType. If it is set to false, java.sql.Timestamp " +
@@ -1906,7 +1906,7 @@ class SQLConf extends Serializable with Logging {
def fastHashAggregateRowMaxCapacityBit: Int = getConf(FAST_HASH_AGGREGATE_MAX_ROWS_CAPACITY_BIT)
- def datetimeJava8ApiEnabled: Boolean = getConf(DATETIME_JAVA8API_EANBLED)
+ def datetimeJava8ApiEnabled: Boolean = getConf(DATETIME_JAVA8API_ENABLED)
/**
* Returns the [[Resolver]] for the current configuration, which can be used to determine if two
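
To make the documented behavior concrete, here is a hedged sketch (not part of this patch; the object name, app name, and local master are invented for illustration) showing how the flag switches the external type returned for a DATE column:

```scala
import java.time.LocalDate
import org.apache.spark.sql.SparkSession

object Java8ApiDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("java8ApiDemo").getOrCreate()

    // Flag on: DateType values come back as java.time.LocalDate.
    spark.conf.set("spark.sql.datetime.java8API.enabled", "true")
    val modern = spark.sql("SELECT DATE '2019-02-26' AS d").head().get(0)
    assert(modern.isInstanceOf[LocalDate])

    // Flag off: the legacy java.sql.Date external type is used, as the doc string describes.
    spark.conf.set("spark.sql.datetime.java8API.enabled", "false")
    val legacy = spark.sql("SELECT DATE '2019-02-26' AS d").head().get(0)
    assert(legacy.isInstanceOf[java.sql.Date])

    spark.stop()
  }
}
```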

@@ -169,7 +169,7 @@ class CatalystTypeConvertersSuite extends SparkFunSuite with SQLHelper {
}
test("converting TimestampType to java.time.Instant") {
- withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "true") {
+ withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
Seq(
-9463427405253013L,
-244000001L,
@@ -199,7 +199,7 @@ class CatalystTypeConvertersSuite extends SparkFunSuite with SQLHelper {
}
test("converting DateType to java.time.LocalDate") {
- withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "true") {
+ withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
Seq(
-701265,
-371419,

@@ -283,7 +283,7 @@ class RowEncoderSuite extends CodegenInterpretedPlanTest {
}
test("encoding/decoding TimestampType to/from java.time.Instant") {
- withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "true") {
+ withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
val schema = new StructType().add("t", TimestampType)
val encoder = RowEncoder(schema).resolveAndBind()
val instant = java.time.Instant.parse("2019-02-26T16:56:00Z")
@@ -295,7 +295,7 @@ class RowEncoderSuite extends CodegenInterpretedPlanTest {
}
test("encoding/decoding DateType to/from java.time.LocalDate") {
- withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "true") {
+ withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
val schema = new StructType().add("d", DateType)
val encoder = RowEncoder(schema).resolveAndBind()
val localDate = java.time.LocalDate.parse("2019-02-27")

@@ -66,11 +66,11 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
checkEvaluation(Literal.default(BinaryType), "".getBytes(StandardCharsets.UTF_8))
checkEvaluation(Literal.default(DecimalType.USER_DEFAULT), Decimal(0))
checkEvaluation(Literal.default(DecimalType.SYSTEM_DEFAULT), Decimal(0))
- withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "false") {
+ withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "false") {
checkEvaluation(Literal.default(DateType), DateTimeUtils.toJavaDate(0))
checkEvaluation(Literal.default(TimestampType), DateTimeUtils.toJavaTimestamp(0L))
}
- withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "true") {
+ withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
checkEvaluation(Literal.default(DateType), LocalDate.ofEpochDay(0))
checkEvaluation(Literal.default(TimestampType), Instant.ofEpochSecond(0))
}
@@ -251,7 +251,7 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
}
test("construct literals from arrays of java.time.LocalDate") {
- withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "true") {
+ withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
val localDate0 = LocalDate.of(2019, 3, 20)
checkEvaluation(Literal(Array(localDate0)), Array(localDate0))
val localDate1 = LocalDate.of(2100, 4, 22)
@@ -272,7 +272,7 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
}
test("construct literals from arrays of java.time.Instant") {
- withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "true") {
+ withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
val instant0 = Instant.ofEpochMilli(0)
checkEvaluation(Literal(Array(instant0)), Array(instant0))
val instant1 = Instant.parse("2019-03-20T10:15:30Z")

@@ -127,16 +127,16 @@ public class JavaUDFSuite implements Serializable {
@SuppressWarnings("unchecked")
@Test
public void udf7Test() {
- String originConf = spark.conf().get(SQLConf.DATETIME_JAVA8API_EANBLED().key());
+ String originConf = spark.conf().get(SQLConf.DATETIME_JAVA8API_ENABLED().key());
try {
- spark.conf().set(SQLConf.DATETIME_JAVA8API_EANBLED().key(), "true");
+ spark.conf().set(SQLConf.DATETIME_JAVA8API_ENABLED().key(), "true");
spark.udf().register(
"plusDay",
(java.time.LocalDate ld) -> ld.plusDays(1), DataTypes.DateType);
Row result = spark.sql("SELECT plusDay(DATE '2019-02-26')").head();
Assert.assertEquals(LocalDate.parse("2019-02-27"), result.get(0));
} finally {
- spark.conf().set(SQLConf.DATETIME_JAVA8API_EANBLED().key(), originConf);
+ spark.conf().set(SQLConf.DATETIME_JAVA8API_ENABLED().key(), originConf);
}
}
}

@@ -496,7 +496,7 @@ class UDFSuite extends QueryTest with SharedSQLContext {
}
test("Using java.time.Instant in UDF") {
- withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "true") {
+ withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
val expected = java.time.Instant.parse("2019-02-27T00:00:00Z")
val plusSec = udf((i: java.time.Instant) => i.plusSeconds(1))
val df = spark.sql("SELECT TIMESTAMP '2019-02-26 23:59:59Z' as t")
@@ -506,7 +506,7 @@ class UDFSuite extends QueryTest with SharedSQLContext {
}
test("Using java.time.LocalDate in UDF") {
- withSQLConf(SQLConf.DATETIME_JAVA8API_EANBLED.key -> "true") {
+ withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
val expected = java.time.LocalDate.parse("2019-02-27")
val plusDay = udf((i: java.time.LocalDate) => i.plusDays(1))
val df = spark.sql("SELECT DATE '2019-02-26' as d")