[SPARK-36043][SQL][TESTS] Add end-to-end tests with default timestamp type as TIMESTAMP_NTZ
### What changes were proposed in this pull request?
Run end-to-end tests with default timestamp type as TIMESTAMP_NTZ to increase test coverage.
### Why are the changes needed?
Increase test coverage.
Also, there will be more and more expressions that have different behaviors when the default timestamp type is TIMESTAMP_NTZ, for example, `to_timestamp`, `from_json`, `from_csv`, and so on. Having this new test suite helps future development.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
CI tests.
Closes #33259 from gengliangwang/ntzTest.
Authored-by: Gengliang Wang <gengliang@apache.org>
Signed-off-by: Gengliang Wang <gengliang@apache.org>
(cherry picked from commit 57342dfc1d)
Signed-off-by: Gengliang Wang <gengliang@apache.org>
This commit is contained in:
parent
2776e8aa47
commit
cafb829c42
|
@ -0,0 +1 @@
|
|||
--IMPORT datetime.sql
|
File diff suppressed because it is too large
Load diff
|
@ -31,6 +31,7 @@ import org.apache.spark.sql.catalyst.util.{fileToString, stringToFile}
|
|||
import org.apache.spark.sql.catalyst.util.DateTimeConstants.NANOS_PER_SECOND
|
||||
import org.apache.spark.sql.execution.WholeStageCodegenExec
|
||||
import org.apache.spark.sql.internal.SQLConf
|
||||
import org.apache.spark.sql.internal.SQLConf.TimestampTypes
|
||||
import org.apache.spark.sql.test.SharedSparkSession
|
||||
import org.apache.spark.tags.ExtendedSQLTest
|
||||
import org.apache.spark.util.Utils
|
||||
|
@ -186,6 +187,11 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
|
|||
*/
|
||||
protected trait AnsiTest
|
||||
|
||||
/**
|
||||
* Trait that indicates the default timestamp type is TimestampNTZType.
|
||||
*/
|
||||
protected trait TimestampNTZTest
|
||||
|
||||
protected trait UDFTest {
|
||||
val udf: TestUDF
|
||||
}
|
||||
|
@ -216,6 +222,10 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
|
|||
protected case class AnsiTestCase(
|
||||
name: String, inputFile: String, resultFile: String) extends TestCase with AnsiTest
|
||||
|
||||
/** A date time test case with default timestamp as TimestampNTZType */
|
||||
protected case class TimestampNTZTestCase(
|
||||
name: String, inputFile: String, resultFile: String) extends TestCase with TimestampNTZTest
|
||||
|
||||
protected def createScalaTestCase(testCase: TestCase): Unit = {
|
||||
if (ignoreList.exists(t =>
|
||||
testCase.name.toLowerCase(Locale.ROOT).contains(t.toLowerCase(Locale.ROOT)))) {
|
||||
|
@ -370,6 +380,9 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
|
|||
localSparkSession.conf.set(SQLConf.LEGACY_INTERVAL_ENABLED.key, true)
|
||||
case _: AnsiTest =>
|
||||
localSparkSession.conf.set(SQLConf.ANSI_ENABLED.key, true)
|
||||
case _: TimestampNTZTest =>
|
||||
localSparkSession.conf.set(SQLConf.TIMESTAMP_TYPE.key,
|
||||
TimestampTypes.TIMESTAMP_NTZ.toString)
|
||||
case _ =>
|
||||
}
|
||||
|
||||
|
@ -481,6 +494,8 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
|
|||
PgSQLTestCase(testCaseName, absPath, resultFile) :: Nil
|
||||
} else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}ansi")) {
|
||||
AnsiTestCase(testCaseName, absPath, resultFile) :: Nil
|
||||
} else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}timestampNTZ")) {
|
||||
TimestampNTZTestCase(testCaseName, absPath, resultFile) :: Nil
|
||||
} else {
|
||||
RegularTestCase(testCaseName, absPath, resultFile) :: Nil
|
||||
}
|
||||
|
|
|
@ -31,6 +31,7 @@ import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
|
|||
import org.apache.spark.sql.catalyst.util.fileToString
|
||||
import org.apache.spark.sql.execution.HiveResult.{getTimeFormatters, toHiveString, TimeFormatters}
|
||||
import org.apache.spark.sql.internal.SQLConf
|
||||
import org.apache.spark.sql.internal.SQLConf.TimestampTypes
|
||||
import org.apache.spark.sql.types._
|
||||
|
||||
// scalastyle:off line.size.limit
|
||||
|
@ -112,6 +113,9 @@ class ThriftServerQueryTestSuite extends SQLQueryTestSuite with SharedThriftServ
|
|||
statement.execute(s"SET ${SQLConf.LEGACY_INTERVAL_ENABLED.key} = true")
|
||||
case _: AnsiTest =>
|
||||
statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = true")
|
||||
case _: TimestampNTZTest =>
|
||||
statement.execute(s"SET ${SQLConf.TIMESTAMP_TYPE.key} = " +
|
||||
s"${TimestampTypes.TIMESTAMP_NTZ.toString}")
|
||||
case _ =>
|
||||
statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = false")
|
||||
}
|
||||
|
@ -244,6 +248,8 @@ class ThriftServerQueryTestSuite extends SQLQueryTestSuite with SharedThriftServ
|
|||
PgSQLTestCase(testCaseName, absPath, resultFile) :: Nil
|
||||
} else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}ansi")) {
|
||||
AnsiTestCase(testCaseName, absPath, resultFile) :: Nil
|
||||
} else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}timestampNTZ")) {
|
||||
TimestampNTZTestCase(testCaseName, absPath, resultFile) :: Nil
|
||||
} else {
|
||||
RegularTestCase(testCaseName, absPath, resultFile) :: Nil
|
||||
}
|
||||
|
|
Loading…
Reference in a new issue