From 82a31508afffd089048e28276c75b5deb1ada47f Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Tue, 17 Aug 2021 12:27:56 +0300
Subject: [PATCH] [SPARK-36524][SQL] Common class for ANSI interval types

### What changes were proposed in this pull request?
Add a new type `AnsiIntervalType` to `AbstractDataType.scala`, and make `YearMonthIntervalType` and `DayTimeIntervalType` extend it.

### Why are the changes needed?
To improve code maintainability. The change allows replacing checks of both `YearMonthIntervalType` and `DayTimeIntervalType` with a single check of `AnsiIntervalType`, for instance:
```scala
case _: YearMonthIntervalType | _: DayTimeIntervalType => false
```
with
```scala
case _: AnsiIntervalType => false
```

### Does this PR introduce _any_ user-facing change?
No.

### How was this patch tested?
By existing test suites.

Closes #33753 from MaxGekk/ansi-interval-type-trait.

Authored-by: Max Gekk
Signed-off-by: Max Gekk
---
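A minimal, standalone sketch of the hierarchy this patch introduces, for reference while reading the diff below. The definitions are simplified stand-ins rather than the real Spark classes (the real `AnsiIntervalType` is `private[sql]`, and the concrete types carry field validation and more members); only the subtyping relationships mirror the patch:

```scala
// Simplified stand-ins; only the subtyping relationships come from the patch.
abstract class AtomicType
abstract class AnsiIntervalType extends AtomicType
case class YearMonthIntervalType(startField: Byte, endField: Byte) extends AnsiIntervalType
case class DayTimeIntervalType(startField: Byte, endField: Byte) extends AnsiIntervalType

// A check over both concrete interval types now needs one case clause.
def isAnsiInterval(dt: AtomicType): Boolean = dt match {
  case _: AnsiIntervalType => true
  case _ => false
}

assert(isAnsiInterval(YearMonthIntervalType(0, 1)))
assert(isAnsiInterval(DayTimeIntervalType(0, 3)))
```

With this shape, every `match` that previously enumerated both concrete types collapses to the single `AnsiIntervalType` case, which is what the diff below does file by file.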
 .../main/scala/org/apache/spark/sql/avro/AvroUtils.scala | 2 +-
 .../org/apache/spark/sql/catalyst/analysis/Analyzer.scala | 8 ++++----
 .../spark/sql/catalyst/analysis/AnsiTypeCoercion.scala | 2 +-
 .../spark/sql/catalyst/analysis/CheckAnalysis.scala | 2 +-
 .../spark/sql/catalyst/expressions/arithmetic.scala | 4 ++--
 .../sql/catalyst/expressions/collectionOperations.scala | 2 +-
 .../sql/catalyst/expressions/datetimeExpressions.scala | 2 +-
 .../org/apache/spark/sql/catalyst/util/TypeUtils.scala | 4 ++--
 .../org/apache/spark/sql/types/AbstractDataType.scala | 5 +++++
 .../org/apache/spark/sql/types/DayTimeIntervalType.scala | 2 +-
 .../apache/spark/sql/types/YearMonthIntervalType.scala | 2 +-
 .../sql/execution/datasources/csv/CSVFileFormat.scala | 2 +-
 .../sql/execution/datasources/json/JsonFileFormat.scala | 2 +-
 .../sql/execution/datasources/orc/OrcFileFormat.scala | 2 +-
 .../execution/datasources/parquet/ParquetFileFormat.scala | 2 +-
 .../spark/sql/execution/datasources/v2/csv/CSVTable.scala | 4 ++--
 .../sql/execution/datasources/v2/json/JsonTable.scala | 2 +-
 .../spark/sql/execution/datasources/v2/orc/OrcTable.scala | 2 +-
 .../execution/datasources/v2/parquet/ParquetTable.scala | 2 +-
 .../thriftserver/SparkExecuteStatementOperation.scala | 2 +-
 .../sql/hive/thriftserver/SparkGetColumnsOperation.scala | 5 ++---
 .../org/apache/spark/sql/hive/orc/OrcFileFormat.scala | 2 +-
 22 files changed, 33 insertions(+), 29 deletions(-)

diff --git a/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroUtils.scala b/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroUtils.scala
index 328927ffb3..f830bbcd56 100644
--- a/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroUtils.scala
+++ b/external/avro/src/main/scala/org/apache/spark/sql/avro/AvroUtils.scala
@@ -71,7 +71,7 @@ private[sql] object AvroUtils extends Logging {
   }
 
   def supportsDataType(dataType: DataType): Boolean = dataType match {
-    case _: DayTimeIntervalType | _: YearMonthIntervalType => false
+    case _: AnsiIntervalType => false
 
     case _: AtomicType => true
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 3513601cfd..a727fa0f20 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -377,9 +377,9 @@ class Analyzer(override val catalogManager: CatalogManager)
           TimestampAddYMInterval(r, l)
         case (CalendarIntervalType, CalendarIntervalType) |
             (_: DayTimeIntervalType, _: DayTimeIntervalType) => a
-        case (_: NullType, _: DayTimeIntervalType | _: YearMonthIntervalType) =>
+        case (_: NullType, _: AnsiIntervalType) =>
           a.copy(left = Cast(a.left, a.right.dataType))
-        case (_: DayTimeIntervalType | _: YearMonthIntervalType, _: NullType) =>
+        case (_: AnsiIntervalType, _: NullType) =>
           a.copy(right = Cast(a.right, a.left.dataType))
         case (DateType, CalendarIntervalType) => DateAddInterval(l, r, ansiEnabled = f)
         case (_, CalendarIntervalType | _: DayTimeIntervalType) => Cast(TimeAdd(l, r), l.dataType)
@@ -400,9 +400,9 @@ class Analyzer(override val catalogManager: CatalogManager)
           DatetimeSub(l, r, TimestampAddYMInterval(l, UnaryMinus(r, f)))
         case (CalendarIntervalType, CalendarIntervalType) |
             (_: DayTimeIntervalType, _: DayTimeIntervalType) => s
-        case (_: NullType, _: DayTimeIntervalType | _: YearMonthIntervalType) =>
+        case (_: NullType, _: AnsiIntervalType) =>
           s.copy(left = Cast(s.left, s.right.dataType))
-        case (_: DayTimeIntervalType | _: YearMonthIntervalType, _: NullType) =>
+        case (_: AnsiIntervalType, _: NullType) =>
           s.copy(right = Cast(s.right, s.left.dataType))
         case (DateType, CalendarIntervalType) =>
           DatetimeSub(l, r, DateAddInterval(l, UnaryMinus(r, f), ansiEnabled = f))
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AnsiTypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AnsiTypeCoercion.scala
index f6d3ac29c3..e8bf2aeac1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AnsiTypeCoercion.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AnsiTypeCoercion.scala
@@ -275,7 +275,7 @@ object AnsiTypeCoercion extends TypeCoercionBase {
     // If a binary operation contains interval type and string literal, we can't decide which
     // interval type the string literal should be promoted as. There are many possible interval
     // types, such as year interval, month interval, day interval, hour interval, etc.
-    case _: YearMonthIntervalType | _: DayTimeIntervalType => false
+    case _: AnsiIntervalType => false
     case _: AtomicType => true
     case _ => false
   }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
index d38327a3c9..932414e3fa 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -981,7 +981,7 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog {
           case u: UserDefinedType[_] =>
             alter.failAnalysis(s"Cannot update ${table.name} field $fieldName type: " +
               s"update a UserDefinedType[${u.sql}] by updating its fields")
-          case _: CalendarIntervalType | _: YearMonthIntervalType | _: DayTimeIntervalType =>
+          case _: CalendarIntervalType | _: AnsiIntervalType =>
             alter.failAnalysis(s"Cannot update ${table.name} field $fieldName to interval type")
           case _ => // update is okay
         }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index a1fdbb1b24..e5e4e3415c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -85,7 +85,7 @@ case class UnaryMinus(
       val iu = IntervalUtils.getClass.getCanonicalName.stripSuffix("$")
       val method = if (failOnError) "negateExact" else "negate"
       defineCodeGen(ctx, ev, c => s"$iu.$method($c)")
-    case _: DayTimeIntervalType | _: YearMonthIntervalType =>
+    case _: AnsiIntervalType =>
       nullSafeCodeGen(ctx, ev, eval => {
         val mathClass = classOf[Math].getName
         s"${ev.value} = $mathClass.negateExact($eval);"
       })
@@ -229,7 +229,7 @@ abstract class BinaryArithmetic extends BinaryOperator with NullIntolerant {
     case CalendarIntervalType =>
       val iu = IntervalUtils.getClass.getCanonicalName.stripSuffix("$")
       defineCodeGen(ctx, ev, (eval1, eval2) => s"$iu.$calendarIntervalMethod($eval1, $eval2)")
-    case _: DayTimeIntervalType | _: YearMonthIntervalType =>
+    case _: AnsiIntervalType =>
       assert(exactMathMethod.isDefined,
         s"The expression '$nodeName' must override the exactMathMethod() method " +
         "if it is supposed to operate over interval types.")
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
index 2c3312a826..6cbab86c5e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
@@ -2597,7 +2597,7 @@ case class Sequence(
   }
 
   private def isNotIntervalType(expr: Expression) = expr.dataType match {
-    case CalendarIntervalType | _: YearMonthIntervalType | _: DayTimeIntervalType => false
+    case CalendarIntervalType | _: AnsiIntervalType => false
     case _ => true
   }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 5d69bbcfa7..84dfb414e0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -2733,7 +2733,7 @@ object DatePart {
       throw QueryCompilationErrors.literalTypeUnsupportedForSourceTypeError(fieldStr, source)
     source.dataType match {
-      case _: YearMonthIntervalType | _: DayTimeIntervalType | CalendarIntervalType =>
+      case _: AnsiIntervalType | CalendarIntervalType =>
         ExtractIntervalPart.parseExtractField(fieldStr, source, analysisException)
       case _ =>
         DatePart.parseExtractField(fieldStr, source, analysisException)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TypeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TypeUtils.scala
index 015dca844a..f4c0f3bbee 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TypeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TypeUtils.scala
@@ -63,7 +63,7 @@ object TypeUtils {
   def checkForAnsiIntervalOrNumericType(
       dt: DataType, funcName: String): TypeCheckResult = dt match {
-    case _: YearMonthIntervalType | _: DayTimeIntervalType | NullType =>
+    case _: AnsiIntervalType | NullType =>
       TypeCheckResult.TypeCheckSuccess
     case dt if dt.isInstanceOf[NumericType] => TypeCheckResult.TypeCheckSuccess
     case other => TypeCheckResult.TypeCheckFailure(
@@ -117,7 +117,7 @@ object TypeUtils {
   def invokeOnceForInterval(dataType: DataType)(f: => Unit): Unit = {
     def isInterval(dataType: DataType): Boolean = dataType match {
-      case CalendarIntervalType | _: DayTimeIntervalType | _: YearMonthIntervalType => true
+      case CalendarIntervalType | _: AnsiIntervalType => true
       case _ => false
     }
     if (dataType.existsRecursively(isInterval)) f
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
index 1b1791cf5d..f7e754e0e5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
@@ -222,3 +222,8 @@ private[sql] object AnyTimestampType extends AbstractDataType with Serializable
   def unapply(e: Expression): Boolean = acceptsType(e.dataType)
 }
+
+/**
+ * The interval type which conforms to the ANSI SQL standard.
+ */
+private[sql] abstract class AnsiIntervalType extends AtomicType
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DayTimeIntervalType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DayTimeIntervalType.scala
index 99aa5f1301..ca8a1f71bd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DayTimeIntervalType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DayTimeIntervalType.scala
@@ -42,7 +42,7 @@ import org.apache.spark.sql.types.DayTimeIntervalType.fieldToString
  * @since 3.2.0
  */
 @Unstable
-case class DayTimeIntervalType(startField: Byte, endField: Byte) extends AtomicType {
+case class DayTimeIntervalType(startField: Byte, endField: Byte) extends AnsiIntervalType {
   /**
    * Internally, values of day-time intervals are stored in `Long` values as amount of time in terms
    * of microseconds that are calculated by the formula:
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/YearMonthIntervalType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/YearMonthIntervalType.scala
index 04902e3c61..4d9168f6ec 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/YearMonthIntervalType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/YearMonthIntervalType.scala
@@ -40,7 +40,7 @@ import org.apache.spark.sql.types.YearMonthIntervalType.fieldToString
  * @since 3.2.0
  */
 @Unstable
-case class YearMonthIntervalType(startField: Byte, endField: Byte) extends AtomicType {
+case class YearMonthIntervalType(startField: Byte, endField: Byte) extends AnsiIntervalType {
   /**
    * Internally, values of year-month intervals are stored in `Int` values as amount of months
    * that are calculated by the formula:
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/csv/CSVFileFormat.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/csv/CSVFileFormat.scala
index c3a8a95477..8add63ccb0 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/csv/CSVFileFormat.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/csv/CSVFileFormat.scala
@@ -148,7 +148,7 @@ class CSVFileFormat extends TextBasedFileFormat with DataSourceRegister {
   override def equals(other: Any): Boolean = other.isInstanceOf[CSVFileFormat]
 
   override def supportDataType(dataType: DataType): Boolean = dataType match {
-    case _: DayTimeIntervalType | _: YearMonthIntervalType => false
+    case _: AnsiIntervalType => false
 
     case _: AtomicType => true
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JsonFileFormat.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JsonFileFormat.scala
index 7ffeba4e4e..8357a411ad 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JsonFileFormat.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JsonFileFormat.scala
@@ -134,7 +134,7 @@ class JsonFileFormat extends TextBasedFileFormat with DataSourceRegister {
   override def equals(other: Any): Boolean = other.isInstanceOf[JsonFileFormat]
 
   override def supportDataType(dataType: DataType): Boolean = dataType match {
-    case _: DayTimeIntervalType | _: YearMonthIntervalType => false
+    case _: AnsiIntervalType => false
 
     case _: AtomicType => true
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcFileFormat.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcFileFormat.scala
index 939743adc0..79c54bc5aa 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcFileFormat.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcFileFormat.scala
@@ -232,7 +232,7 @@ class OrcFileFormat
   }
 
   override def supportDataType(dataType: DataType): Boolean = dataType match {
-    case _: DayTimeIntervalType | _: YearMonthIntervalType => false
+    case _: AnsiIntervalType => false
 
     case _: AtomicType => true
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala
index e2fe5b4486..d3ac077ccf 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala
@@ -373,7 +373,7 @@ class ParquetFileFormat
   }
 
   override def supportDataType(dataType: DataType): Boolean = dataType match {
-    case _: DayTimeIntervalType | _: YearMonthIntervalType => false
+    case _: AnsiIntervalType => false
 
     case _: AtomicType => true
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/csv/CSVTable.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/csv/CSVTable.scala
index e6299be20c..02601b3ae5 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/csv/CSVTable.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/csv/CSVTable.scala
@@ -26,7 +26,7 @@ import org.apache.spark.sql.connector.write.{LogicalWriteInfo, Write, WriteBuild
 import org.apache.spark.sql.execution.datasources.FileFormat
 import org.apache.spark.sql.execution.datasources.csv.CSVDataSource
 import org.apache.spark.sql.execution.datasources.v2.FileTable
-import org.apache.spark.sql.types.{AtomicType, DataType, DayTimeIntervalType, StructType, UserDefinedType, YearMonthIntervalType}
+import org.apache.spark.sql.types.{AnsiIntervalType, AtomicType, DataType, StructType, UserDefinedType}
 import org.apache.spark.sql.util.CaseInsensitiveStringMap
 
 case class CSVTable(
@@ -55,7 +55,7 @@ case class CSVTable(
   }
 
   override def supportsDataType(dataType: DataType): Boolean = dataType match {
-    case _: DayTimeIntervalType | _: YearMonthIntervalType => false
+    case _: AnsiIntervalType => false
 
     case _: AtomicType => true
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/json/JsonTable.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/json/JsonTable.scala
index 38277fdf03..244dd0f6a7 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/json/JsonTable.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/json/JsonTable.scala
@@ -55,7 +55,7 @@ case class JsonTable(
   }
 
   override def supportsDataType(dataType: DataType): Boolean = dataType match {
-    case _: DayTimeIntervalType | _: YearMonthIntervalType => false
+    case _: AnsiIntervalType => false
 
     case _: AtomicType => true
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcTable.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcTable.scala
index bffc091b0f..628b0a1f5e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcTable.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcTable.scala
@@ -49,7 +49,7 @@ case class OrcTable(
   }
 
   override def supportsDataType(dataType: DataType): Boolean = dataType match {
-    case _: DayTimeIntervalType | _: YearMonthIntervalType => false
+    case _: AnsiIntervalType => false
 
     case _: AtomicType => true
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetTable.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetTable.scala
index 78797f7613..f22a258f1b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetTable.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetTable.scala
@@ -49,7 +49,7 @@ case class ParquetTable(
   }
 
   override def supportsDataType(dataType: DataType): Boolean = dataType match {
-    case _: DayTimeIntervalType | _: YearMonthIntervalType => false
+    case _: AnsiIntervalType => false
 
     case _: AtomicType => true
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
index bfff5d7217..f43f8e7eba 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
@@ -121,7 +121,7 @@ private[hive] class SparkExecuteStatementOperation(
           false,
           timeFormatters)
       case _: ArrayType | _: StructType | _: MapType | _: UserDefinedType[_] |
-          _: YearMonthIntervalType | _: DayTimeIntervalType | _: TimestampNTZType =>
+          _: AnsiIntervalType | _: TimestampNTZType =>
         to += toHiveString((from.get(ordinal), dataTypes(ordinal)), false, timeFormatters)
     }
   }
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetColumnsOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetColumnsOperation.scala
index 40beceaad2..a455d50f12 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetColumnsOperation.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetColumnsOperation.scala
@@ -131,8 +131,7 @@ private[hive] class SparkGetColumnsOperation(
    */
   private def getColumnSize(typ: DataType): Option[Int] = typ match {
     case dt @ (BooleanType | _: NumericType | DateType | TimestampType | TimestampNTZType |
-        CalendarIntervalType | NullType |
-        _: YearMonthIntervalType | _: DayTimeIntervalType) =>
+        CalendarIntervalType | NullType | _: AnsiIntervalType) =>
       Some(dt.defaultSize)
     case CharType(n) => Some(n)
     case StructType(fields) =>
@@ -187,7 +186,7 @@ private[hive] class SparkGetColumnsOperation(
     case _: MapType => java.sql.Types.JAVA_OBJECT
     case _: StructType => java.sql.Types.STRUCT
     // Hive's year-month and day-time intervals are mapping to java.sql.Types.OTHER
-    case _: CalendarIntervalType | _: YearMonthIntervalType | _: DayTimeIntervalType =>
+    case _: CalendarIntervalType | _: AnsiIntervalType =>
       java.sql.Types.OTHER
     case _ => throw new IllegalArgumentException(s"Unrecognized type name: ${typ.sql}")
   }
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileFormat.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileFormat.scala
index 2ca1eb3ddd..40924ecbc9 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileFormat.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileFormat.scala
@@ -194,7 +194,7 @@ class OrcFileFormat extends FileFormat with DataSourceRegister with Serializable
   }
 
   override def supportDataType(dataType: DataType): Boolean = dataType match {
-    case _: DayTimeIntervalType | _: YearMonthIntervalType => false
+    case _: AnsiIntervalType => false
 
     case _: AtomicType => true
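
One detail worth noting about the recurring `supportDataType`/`supportsDataType` hunks: case order matters. After this patch the ANSI interval types are themselves `AtomicType`s, so the `case _: AnsiIntervalType => false` clause must sit before `case _: AtomicType => true`. A small self-contained sketch (hypothetical simplified types, not the real Spark classes) demonstrating why:

```scala
// Hypothetical simplified types; only AnsiIntervalType <: AtomicType mirrors the patch.
abstract class DataType
abstract class AtomicType extends DataType
abstract class AnsiIntervalType extends AtomicType
case object StringType extends AtomicType
case class DayTimeIntervalType(startField: Byte, endField: Byte) extends AnsiIntervalType

def supportDataType(dataType: DataType): Boolean = dataType match {
  case _: AnsiIntervalType => false // must precede the AtomicType case
  case _: AtomicType => true
  case _ => false
}

// Intervals are AtomicTypes, so swapping the first two cases would
// wrongly report them as supported.
assert(!supportDataType(DayTimeIntervalType(0, 3)))
assert(supportDataType(StringType))
```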