[SPARK-36120][SQL] Support TimestampNTZ type in cache table
### What changes were proposed in this pull request?
Support TimestampNTZ type column in SQL command Cache table
### Why are the changes needed?
Cache table should support the new timestamp type.
### Does this PR introduce _any_ user-facing change?
Yes, the TimestampNTZ type column can be used in `CACHE TABLE`
### How was this patch tested?
Unit test
Closes #33322 from gengliangwang/cacheTable.
Authored-by: Gengliang Wang <gengliang@apache.org>
Signed-off-by: Max Gekk <max.gekk@gmail.com>
(cherry picked from commit 067432705f)
Signed-off-by: Max Gekk <max.gekk@gmail.com>
This commit is contained in:
parent
a0f61ccfe4
commit
3ace01b25b
|
@ -175,7 +175,8 @@ private[columnar] object ColumnBuilder {
|
||||||
case ByteType => new ByteColumnBuilder
|
case ByteType => new ByteColumnBuilder
|
||||||
case ShortType => new ShortColumnBuilder
|
case ShortType => new ShortColumnBuilder
|
||||||
case IntegerType | DateType | _: YearMonthIntervalType => new IntColumnBuilder
|
case IntegerType | DateType | _: YearMonthIntervalType => new IntColumnBuilder
|
||||||
case LongType | TimestampType | _: DayTimeIntervalType => new LongColumnBuilder
|
case LongType | TimestampType | TimestampNTZType | _: DayTimeIntervalType =>
|
||||||
|
new LongColumnBuilder
|
||||||
case FloatType => new FloatColumnBuilder
|
case FloatType => new FloatColumnBuilder
|
||||||
case DoubleType => new DoubleColumnBuilder
|
case DoubleType => new DoubleColumnBuilder
|
||||||
case StringType => new StringColumnBuilder
|
case StringType => new StringColumnBuilder
|
||||||
|
|
|
@ -818,7 +818,7 @@ private[columnar] object ColumnType {
|
||||||
case ByteType => BYTE
|
case ByteType => BYTE
|
||||||
case ShortType => SHORT
|
case ShortType => SHORT
|
||||||
case IntegerType | DateType | _: YearMonthIntervalType => INT
|
case IntegerType | DateType | _: YearMonthIntervalType => INT
|
||||||
case LongType | TimestampType | _: DayTimeIntervalType => LONG
|
case LongType | TimestampType | TimestampNTZType | _: DayTimeIntervalType => LONG
|
||||||
case FloatType => FLOAT
|
case FloatType => FLOAT
|
||||||
case DoubleType => DOUBLE
|
case DoubleType => DOUBLE
|
||||||
case StringType => STRING
|
case StringType => STRING
|
||||||
|
|
|
@ -81,7 +81,7 @@ object GenerateColumnAccessor extends CodeGenerator[Seq[DataType], ColumnarItera
|
||||||
case ByteType => classOf[ByteColumnAccessor].getName
|
case ByteType => classOf[ByteColumnAccessor].getName
|
||||||
case ShortType => classOf[ShortColumnAccessor].getName
|
case ShortType => classOf[ShortColumnAccessor].getName
|
||||||
case IntegerType | DateType | _: YearMonthIntervalType => classOf[IntColumnAccessor].getName
|
case IntegerType | DateType | _: YearMonthIntervalType => classOf[IntColumnAccessor].getName
|
||||||
case LongType | TimestampType | _: DayTimeIntervalType =>
|
case LongType | TimestampType | TimestampNTZType | _: DayTimeIntervalType =>
|
||||||
classOf[LongColumnAccessor].getName
|
classOf[LongColumnAccessor].getName
|
||||||
case FloatType => classOf[FloatColumnAccessor].getName
|
case FloatType => classOf[FloatColumnAccessor].getName
|
||||||
case DoubleType => classOf[DoubleColumnAccessor].getName
|
case DoubleType => classOf[DoubleColumnAccessor].getName
|
||||||
|
|
|
@ -19,7 +19,7 @@ package org.apache.spark.sql
|
||||||
|
|
||||||
import java.io.{File, FilenameFilter}
|
import java.io.{File, FilenameFilter}
|
||||||
import java.nio.file.{Files, Paths}
|
import java.nio.file.{Files, Paths}
|
||||||
import java.time.{Duration, Period}
|
import java.time.{Duration, LocalDateTime, Period}
|
||||||
|
|
||||||
import scala.collection.mutable.HashSet
|
import scala.collection.mutable.HashSet
|
||||||
import scala.concurrent.duration._
|
import scala.concurrent.duration._
|
||||||
|
@ -1538,6 +1538,19 @@ class CachedTableSuite extends QueryTest with SQLTestUtils
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
test("SPARK-36120: Support cache/uncache table with TimestampNTZ type") {
|
||||||
|
val tableName = "ntzCache"
|
||||||
|
withTable(tableName) {
|
||||||
|
sql(s"CACHE TABLE $tableName AS SELECT TIMESTAMP_NTZ'2021-01-01 00:00:00'")
|
||||||
|
checkAnswer(spark.table(tableName), Row(LocalDateTime.parse("2021-01-01T00:00:00")))
|
||||||
|
spark.table(tableName).queryExecution.withCachedData.collect {
|
||||||
|
case cached: InMemoryRelation =>
|
||||||
|
assert(cached.stats.sizeInBytes === 8)
|
||||||
|
}
|
||||||
|
sql(s"UNCACHE TABLE $tableName")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
private def testCreateTemporaryViewUsingWithCache(ident: TableIdentifier): Unit = {
|
private def testCreateTemporaryViewUsingWithCache(ident: TableIdentifier): Unit = {
|
||||||
withTempDir { dir =>
|
withTempDir { dir =>
|
||||||
val path1 = new File(dir, "t1").getCanonicalPath
|
val path1 = new File(dir, "t1").getCanonicalPath
|
||||||
|
|
Loading…
Reference in a new issue