[SPARK-31038][SQL] Add checkValue for spark.sql.session.timeZone
### What changes were proposed in this pull request? The `spark.sql.session.timeZone` config currently accepts any string value, including invalid time-zone IDs, which then causes failures in other queries that rely on the time zone. This change validates the value in the set phase and fails fast if the zone ID is invalid. ### Why are the changes needed? Improves configuration robustness. ### Does this PR introduce any user-facing change? Yes: setting an invalid time-zone ID now fails immediately instead of breaking later queries. ### How was this patch tested? Added a unit test. Closes #27792 from yaooqinn/SPARK-31038. Authored-by: Kent Yao <yaooqinn@hotmail.com> Signed-off-by: Wenchen Fan <wenchen@databricks.com>
This commit is contained in:
parent
9b602e26d2
commit
f45ae7f2c5
|
@ -24,6 +24,7 @@ import java.util.zip.Deflater
|
|||
|
||||
import scala.collection.JavaConverters._
|
||||
import scala.collection.immutable
|
||||
import scala.util.Try
|
||||
import scala.util.matching.Regex
|
||||
|
||||
import org.apache.hadoop.fs.Path
|
||||
|
@ -38,6 +39,7 @@ import org.apache.spark.sql.catalyst.analysis.{HintErrorLogger, Resolver}
|
|||
import org.apache.spark.sql.catalyst.expressions.CodegenObjectFactoryMode
|
||||
import org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator
|
||||
import org.apache.spark.sql.catalyst.plans.logical.HintErrorHandler
|
||||
import org.apache.spark.sql.catalyst.util.DateTimeUtils
|
||||
import org.apache.spark.sql.connector.catalog.CatalogManager.SESSION_CATALOG_NAME
|
||||
import org.apache.spark.unsafe.array.ByteArrayMethods
|
||||
import org.apache.spark.util.Utils
|
||||
|
@ -1649,11 +1651,17 @@ object SQLConf {
|
|||
.doubleConf
|
||||
.createWithDefault(0.9)
|
||||
|
||||
/**
 * Returns true iff `zone` can be resolved to a `ZoneId` by `DateTimeUtils.getZoneId`
 * (which also accepts the short-ID aliases); used to validate the session time-zone conf.
 */
private def isValidTimezone(zone: String): Boolean =
  Try(DateTimeUtils.getZoneId(zone)).isSuccess
|
||||
|
||||
/**
 * Session-local time zone used for timestamp/string conversions. The value is validated
 * when it is set (SPARK-31038) so an invalid zone ID fails fast instead of breaking
 * later queries; defaults to the JVM's default time zone, resolved lazily per session.
 */
val SESSION_LOCAL_TIMEZONE =
  buildConf("spark.sql.session.timeZone")
    .doc("""The ID of session local timezone, e.g. "GMT", "America/Los_Angeles", etc.""")
    .version("") // NOTE(review): version is empty — fill in the release this conf was introduced in; TODO confirm
    .stringConf
    // Reject invalid zone ids at set time. Message text is asserted verbatim in
    // SQLConfSuite, so keep it stable. (Dropped the needless `s` interpolator: the
    // string has no interpolations.)
    .checkValue(isValidTimezone, "Cannot resolve the given timezone with" +
      " ZoneId.of(_, ZoneId.SHORT_IDS)")
    // Default function (not a constant) so the JVM default zone is read at lookup time.
    .createWithDefaultFunction(() => TimeZone.getDefault.getID)
|
||||
|
||||
val WINDOW_EXEC_BUFFER_IN_MEMORY_THRESHOLD =
|
||||
|
|
|
@ -348,4 +348,23 @@ class SQLConfSuite extends QueryTest with SharedSparkSession {
|
|||
}
|
||||
check(config2)
|
||||
}
|
||||
|
||||
test("spark.sql.session.timeZone should only accept valid zone id") {
  val tzKey = SQLConf.SESSION_LOCAL_TIMEZONE.key

  // Valid IDs — both a short alias and a region-based ID — are accepted,
  // and SET reads the stored value back.
  spark.conf.set(tzKey, "MIT")
  assert(sql(s"set $tzKey").head().getString(1) === "MIT")
  spark.conf.set(tzKey, "America/Chicago")
  assert(sql(s"set $tzKey").head().getString(1) === "America/Chicago")

  // Invalid IDs (lowercase alias, malformed offset) are rejected at set time.
  Seq("pst", "GMT+8:00").foreach { invalid =>
    intercept[IllegalArgumentException] {
      spark.conf.set(tzKey, invalid)
    }
  }
  // Region IDs are case-sensitive; the error message must match checkValue's exactly.
  val e = intercept[IllegalArgumentException] {
    spark.conf.set(tzKey, "Asia/shanghai")
  }
  assert(e.getMessage === "Cannot resolve the given timezone with ZoneId.of(_, ZoneId.SHORT_IDS)")
}
|
||||
}
|
||||
|
|
Loading…
Reference in a new issue