[SPARK-36043][SQL][TESTS] Add end-to-end tests with default timestamp type as TIMESTAMP_NTZ

### What changes were proposed in this pull request?

Run end-to-end tests with default timestamp type as TIMESTAMP_NTZ to increase test coverage.

### Why are the changes needed?

Increase test coverage.
Also, there will be more and more expressions that have different behaviors when the default timestamp type is TIMESTAMP_NTZ, for example, `to_timestamp`, `from_json`, `from_csv`, and so on. Having this new test suite helps future developments.

### Does this PR introduce _any_ user-facing change?

No

### How was this patch tested?

CI tests.

Closes #33259 from gengliangwang/ntzTest.

Authored-by: Gengliang Wang <gengliang@apache.org>
Signed-off-by: Gengliang Wang <gengliang@apache.org>
(cherry picked from commit 57342dfc1d)
Signed-off-by: Gengliang Wang <gengliang@apache.org>
This commit is contained in:
Gengliang Wang 2021-07-08 19:38:52 +08:00
parent 2776e8aa47
commit cafb829c42
4 changed files with 1617 additions and 0 deletions

View file

@ -0,0 +1 @@
--IMPORT datetime.sql

File diff suppressed because it is too large Load diff

View file

@ -31,6 +31,7 @@ import org.apache.spark.sql.catalyst.util.{fileToString, stringToFile}
import org.apache.spark.sql.catalyst.util.DateTimeConstants.NANOS_PER_SECOND import org.apache.spark.sql.catalyst.util.DateTimeConstants.NANOS_PER_SECOND
import org.apache.spark.sql.execution.WholeStageCodegenExec import org.apache.spark.sql.execution.WholeStageCodegenExec
import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.TimestampTypes
import org.apache.spark.sql.test.SharedSparkSession import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.tags.ExtendedSQLTest import org.apache.spark.tags.ExtendedSQLTest
import org.apache.spark.util.Utils import org.apache.spark.util.Utils
@ -186,6 +187,11 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
*/ */
protected trait AnsiTest protected trait AnsiTest
/**
 * Trait that indicates the default timestamp type is TimestampNTZType.
 */
protected trait TimestampNTZTest
protected trait UDFTest { protected trait UDFTest {
val udf: TestUDF val udf: TestUDF
} }
@ -216,6 +222,10 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
protected case class AnsiTestCase( protected case class AnsiTestCase(
name: String, inputFile: String, resultFile: String) extends TestCase with AnsiTest name: String, inputFile: String, resultFile: String) extends TestCase with AnsiTest
/** A datetime test case with default timestamp as TimestampNTZType */
protected case class TimestampNTZTestCase(
name: String, inputFile: String, resultFile: String) extends TestCase with TimestampNTZTest
protected def createScalaTestCase(testCase: TestCase): Unit = { protected def createScalaTestCase(testCase: TestCase): Unit = {
if (ignoreList.exists(t => if (ignoreList.exists(t =>
testCase.name.toLowerCase(Locale.ROOT).contains(t.toLowerCase(Locale.ROOT)))) { testCase.name.toLowerCase(Locale.ROOT).contains(t.toLowerCase(Locale.ROOT)))) {
@ -370,6 +380,9 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
localSparkSession.conf.set(SQLConf.LEGACY_INTERVAL_ENABLED.key, true) localSparkSession.conf.set(SQLConf.LEGACY_INTERVAL_ENABLED.key, true)
case _: AnsiTest => case _: AnsiTest =>
localSparkSession.conf.set(SQLConf.ANSI_ENABLED.key, true) localSparkSession.conf.set(SQLConf.ANSI_ENABLED.key, true)
case _: TimestampNTZTest =>
localSparkSession.conf.set(SQLConf.TIMESTAMP_TYPE.key,
TimestampTypes.TIMESTAMP_NTZ.toString)
case _ => case _ =>
} }
@ -481,6 +494,8 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
PgSQLTestCase(testCaseName, absPath, resultFile) :: Nil PgSQLTestCase(testCaseName, absPath, resultFile) :: Nil
} else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}ansi")) { } else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}ansi")) {
AnsiTestCase(testCaseName, absPath, resultFile) :: Nil AnsiTestCase(testCaseName, absPath, resultFile) :: Nil
} else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}timestampNTZ")) {
TimestampNTZTestCase(testCaseName, absPath, resultFile) :: Nil
} else { } else {
RegularTestCase(testCaseName, absPath, resultFile) :: Nil RegularTestCase(testCaseName, absPath, resultFile) :: Nil
} }

View file

@ -31,6 +31,7 @@ import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
import org.apache.spark.sql.catalyst.util.fileToString import org.apache.spark.sql.catalyst.util.fileToString
import org.apache.spark.sql.execution.HiveResult.{getTimeFormatters, toHiveString, TimeFormatters} import org.apache.spark.sql.execution.HiveResult.{getTimeFormatters, toHiveString, TimeFormatters}
import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.TimestampTypes
import org.apache.spark.sql.types._ import org.apache.spark.sql.types._
// scalastyle:off line.size.limit // scalastyle:off line.size.limit
@ -112,6 +113,9 @@ class ThriftServerQueryTestSuite extends SQLQueryTestSuite with SharedThriftServ
statement.execute(s"SET ${SQLConf.LEGACY_INTERVAL_ENABLED.key} = true") statement.execute(s"SET ${SQLConf.LEGACY_INTERVAL_ENABLED.key} = true")
case _: AnsiTest => case _: AnsiTest =>
statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = true") statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = true")
case _: TimestampNTZTest =>
statement.execute(s"SET ${SQLConf.TIMESTAMP_TYPE.key} = " +
s"${TimestampTypes.TIMESTAMP_NTZ.toString}")
case _ => case _ =>
statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = false") statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = false")
} }
@ -244,6 +248,8 @@ class ThriftServerQueryTestSuite extends SQLQueryTestSuite with SharedThriftServ
PgSQLTestCase(testCaseName, absPath, resultFile) :: Nil PgSQLTestCase(testCaseName, absPath, resultFile) :: Nil
} else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}ansi")) { } else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}ansi")) {
AnsiTestCase(testCaseName, absPath, resultFile) :: Nil AnsiTestCase(testCaseName, absPath, resultFile) :: Nil
} else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}timestampNTZ")) {
TimestampNTZTestCase(testCaseName, absPath, resultFile) :: Nil
} else { } else {
RegularTestCase(testCaseName, absPath, resultFile) :: Nil RegularTestCase(testCaseName, absPath, resultFile) :: Nil
} }