[SPARK-29614][SQL][TEST] Fix failures of DateTimeUtilsSuite and TimestampFormatterSuite

### What changes were proposed in this pull request?
The `DateTimeUtilsSuite` and `TimestampFormatterSuite` assume a constant time difference between `timestamp'yesterday'`, `timestamp'today'` and `timestamp'tomorrow'`, which is wrong on a daylight-saving switch day — the day length can be 23 or 25 hours. In the PR, I propose to use the Java 8 time API to calculate the instants of the `yesterday` and `tomorrow` timestamps.

### Why are the changes needed?
The changes fix the test failures and make the tests tolerant of daylight saving time transitions.

### Does this PR introduce any user-facing change?
No

### How was this patch tested?
By existing test suites `DateTimeUtilsSuite` and `TimestampFormatterSuite`.

Closes #26273 from MaxGekk/midnight-tolerant.

Authored-by: Maxim Gekk <max.gekk@gmail.com>
Signed-off-by: Dongjoon Hyun <dhyun@apple.com>
This commit is contained in:
Maxim Gekk 2019-10-27 13:48:43 -07:00 committed by Dongjoon Hyun
parent 077fb99a26
commit 74514b46e5
2 changed files with 15 additions and 9 deletions

View file

@ -586,12 +586,15 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers {
val now = instantToMicros(LocalDateTime.now(zoneId).atZone(zoneId).toInstant)
toTimestamp("NOW", zoneId).get should be (now +- tolerance)
assert(toTimestamp("now UTC", zoneId) === None)
val today = instantToMicros(LocalDateTime.now(zoneId)
val localToday = LocalDateTime.now(zoneId)
.`with`(LocalTime.MIDNIGHT)
.atZone(zoneId).toInstant)
toTimestamp(" Yesterday", zoneId).get should be (today - MICROS_PER_DAY +- tolerance)
.atZone(zoneId)
val yesterday = instantToMicros(localToday.minusDays(1).toInstant)
toTimestamp(" Yesterday", zoneId).get should be (yesterday +- tolerance)
val today = instantToMicros(localToday.toInstant)
toTimestamp("Today ", zoneId).get should be (today +- tolerance)
toTimestamp(" tomorrow CET ", zoneId).get should be (today + MICROS_PER_DAY +- tolerance)
val tomorrow = instantToMicros(localToday.plusDays(1).toInstant)
toTimestamp(" tomorrow CET ", zoneId).get should be (tomorrow +- tolerance)
}
}

View file

@ -25,7 +25,7 @@ import org.scalatest.Matchers
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.plans.SQLHelper
import org.apache.spark.sql.catalyst.util.{DateTimeTestUtils, DateTimeUtils, TimestampFormatter}
import org.apache.spark.sql.catalyst.util.DateTimeUtils.{getZoneId, instantToMicros, MICROS_PER_DAY}
import org.apache.spark.sql.catalyst.util.DateTimeUtils.{getZoneId, instantToMicros}
import org.apache.spark.sql.internal.SQLConf
class TimestampFormatterSuite extends SparkFunSuite with SQLHelper with Matchers {
@ -146,12 +146,15 @@ class TimestampFormatterSuite extends SparkFunSuite with SQLHelper with Matchers
assert(formatter.parse("EPOCH") === 0)
val now = instantToMicros(LocalDateTime.now(zoneId).atZone(zoneId).toInstant)
formatter.parse("now") should be (now +- tolerance)
val today = instantToMicros(LocalDateTime.now(zoneId)
val localToday = LocalDateTime.now(zoneId)
.`with`(LocalTime.MIDNIGHT)
.atZone(zoneId).toInstant)
formatter.parse("yesterday CET") should be (today - MICROS_PER_DAY +- tolerance)
.atZone(zoneId)
val yesterday = instantToMicros(localToday.minusDays(1).toInstant)
formatter.parse("yesterday CET") should be (yesterday +- tolerance)
val today = instantToMicros(localToday.toInstant)
formatter.parse(" TODAY ") should be (today +- tolerance)
formatter.parse("Tomorrow ") should be (today + MICROS_PER_DAY +- tolerance)
val tomorrow = instantToMicros(localToday.plusDays(1).toInstant)
formatter.parse("Tomorrow ") should be (tomorrow +- tolerance)
}
}
}