[SPARK-36107][SQL] Refactor first set of 20 query execution errors to use error classes

### What changes were proposed in this pull request?
Refactor some exceptions in QueryExecutionErrors to use error classes, as follows:
```
columnChangeUnsupportedError
logicalHintOperatorNotRemovedDuringAnalysisError
cannotEvaluateExpressionError
cannotGenerateCodeForExpressionError
cannotTerminateGeneratorError
castingCauseOverflowError
cannotChangeDecimalPrecisionError
invalidInputSyntaxForNumericError
cannotCastFromNullTypeError
cannotCastError
cannotParseDecimalError
simpleStringWithNodeIdUnsupportedError
evaluateUnevaluableAggregateUnsupportedError
dataTypeUnsupportedError
dataTypeUnsupportedError
failedExecuteUserDefinedFunctionError
divideByZeroError
invalidArrayIndexError
mapKeyNotExistError
rowFromCSVParserNotExpectedError
```
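The pattern behind each of these refactors is the same: the hard-coded message string moves into a named error class whose template (and optional SQLSTATE) lives in the JSON error-class file, and the thrown exception keeps its JVM type while also implementing `SparkThrowable`. Below is a minimal, self-contained sketch of that pattern; the object and class names are illustrative, and the template map is inlined here rather than loaded from the JSON file as `SparkThrowableHelper` does.

```scala
// Illustrative sketch only: templates are inlined instead of being read
// from the JSON error-class file as SparkThrowableHelper does.
object ErrorClassSketch {
  private val templates: Map[String, String] = Map(
    "CAST_CAUSES_OVERFLOW" -> "Casting %s to %s causes overflow",
    "CANNOT_PARSE_DECIMAL" -> "Cannot parse decimal")

  // Substitute positional parameters into the %s placeholders.
  def getMessage(errorClass: String, parameters: Seq[String]): String =
    String.format(templates(errorClass), parameters: _*)

  // Keeps the familiar JVM type (ArithmeticException) while also carrying a
  // machine-readable error class, like SparkArithmeticException in this diff.
  class ArithmeticWithErrorClass(val errorClass: String, parameters: Seq[String])
    extends ArithmeticException(getMessage(errorClass, parameters))

  def main(args: Array[String]): Unit = {
    val e = new ArithmeticWithErrorClass("CAST_CAUSES_OVERFLOW", Seq("2.14748365E9", "int"))
    println(s"[${e.errorClass}] ${e.getMessage}")
    // prints: [CAST_CAUSES_OVERFLOW] Casting 2.14748365E9 to int causes overflow
  }
}
```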

### Why are the changes needed?
These exceptions gain stable, machine-readable error classes (and SQLSTATEs where applicable); see [SPARK-36107](https://issues.apache.org/jira/browse/SPARK-36107).

### Does this PR introduce _any_ user-facing change?
No

### How was this patch tested?
Existing unit tests.

Closes #33538 from Peng-Lei/SPARK-36017.

Lead-authored-by: PengLei <peng.8lei@gmail.com>
Co-authored-by: Lei Peng <peng.8lei@gmail.com>
Signed-off-by: Hyukjin Kwon <gurwls223@apache.org>
Commit: c2881c5ee2 (parent: cabc36b54d)
Authored by: PengLei, 2021-09-20 10:34:19 +09:00; committed by Hyukjin Kwon
11 changed files with 211 additions and 59 deletions


@@ -3,6 +3,31 @@
"message" : [ "Field name %s is ambiguous and has %s matching fields in the struct." ],
"sqlState" : "42000"
},
"CANNOT_CAST_DATATYPE" : {
"message" : [ "Cannot cast %s to %s." ],
"sqlState" : "22005"
},
"CANNOT_CHANGE_DECIMAL_PRECISION" : {
"message" : [ "%s cannot be represented as Decimal(%s, %s)." ],
"sqlState" : "22005"
},
"CANNOT_EVALUATE_EXPRESSION" : {
"message" : [ "Cannot evaluate expression: %s" ]
},
"CANNOT_GENERATE_CODE_FOR_EXPRESSION" : {
"message" : [ "Cannot generate code for expression: %s" ]
},
"CANNOT_PARSE_DECIMAL" : {
"message" : [ "Cannot parse decimal" ],
"sqlState" : "42000"
},
"CANNOT_TERMINATE_GENERATOR" : {
"message" : [ "Cannot terminate expression: %s" ]
},
"CAST_CAUSES_OVERFLOW" : {
"message" : [ "Casting %s to %s causes overflow" ],
"sqlState" : "22005"
},
"CONCURRENT_QUERY" : {
"message" : [ "Another instance of this query was just started by a concurrent session." ]
},
@@ -14,6 +39,9 @@
"message" : [ "Found duplicate keys '%s'" ],
"sqlState" : "23000"
},
"FAILED_EXECUTE_UDF" : {
"message" : [ "Failed to execute user defined function (%s: (%s) => %s)" ]
},
"FAILED_RENAME_PATH" : {
"message" : [ "Failed to rename %s to %s as destination already exists" ],
"sqlState" : "22023"
@@ -46,6 +74,9 @@
"message" : [ "Index %s must be between 0 and the length of the ArrayData." ],
"sqlState" : "22023"
},
"INVALID_ARRAY_INDEX" : {
"message" : [ "Invalid index: %s, numElements: %s" ]
},
"INVALID_FIELD_NAME" : {
"message" : [ "Field name %s is invalid: %s is not a struct." ],
"sqlState" : "42000"
@@ -54,9 +85,19 @@
"message" : [ "The fraction of sec must be zero. Valid range is [0, 60]." ],
"sqlState" : "22023"
},
"INVALID_INPUT_SYNTAX_FOR_NUMERIC_TYPE" : {
"message" : [ "invalid input syntax for type numeric: %s" ],
"sqlState" : "42000"
},
"INVALID_JSON_SCHEMA_MAPTYPE" : {
"message" : [ "Input schema %s can only contain StringType as a key type for a MapType." ]
},
"LOGICAL_HINT_OPERATOR_NOT_REMOVED_DURING_ANALYSIS" : {
"message" : [ "Internal error: logical hint operator should have been removed during analysis" ]
},
"MAP_KEY_DOES_NOT_EXIST" : {
"message" : [ "Key %s does not exist." ]
},
"MISSING_COLUMN" : {
"message" : [ "cannot resolve '%s' given input columns: [%s]" ],
"sqlState" : "42000"
@@ -85,6 +126,10 @@
"message" : [ "Failed to rename as %s was not found" ],
"sqlState" : "22023"
},
"ROW_FROM_CSV_PARSER_NOT_EXPECTED" : {
"message" : [ "Expected one row from CSV parser." ],
"sqlState" : "42000"
},
"SECOND_FUNCTION_ARGUMENT_NOT_INTEGER" : {
"message" : [ "The second argument of '%s' function needs to be an integer." ],
"sqlState" : "22023"
@@ -96,10 +141,21 @@
"message" : [ "Unrecognized SQL type %s" ],
"sqlState" : "42000"
},
"UNSUPPORTED_CHANGE_COLUMN" : {
"message" : [ "Please add an implementation for a column change here" ],
"sqlState" : "0A000"
},
"UNSUPPORTED_DATATYPE" : {
"message" : [ "Unsupported data type %s" ],
"sqlState" : "0A000"
},
"UNSUPPORTED_LITERAL_TYPE" : {
"message" : [ "Unsupported literal type %s %s" ],
"sqlState" : "0A000"
},
"UNSUPPORTED_SIMPLE_STRING_WITH_NODE_ID" : {
"message" : [ "%s does not implement simpleStringWithNodeId" ]
},
"UNSUPPORTED_TRANSACTION_BY_JDBC_SERVER" : {
"message" : [ "The target JDBC server does not support transaction and can only support ALTER TABLE with a single action." ],
"sqlState" : "0A000"


@@ -87,6 +87,19 @@ private[spark] class SparkArithmeticException(errorClass: String, messageParamet
override def getSqlState: String = SparkThrowableHelper.getSqlState(errorClass)
}
/**
* Unsupported operation exception thrown from Spark with an error class.
*/
private[spark] class SparkUnsupportedOperationException(
errorClass: String,
messageParameters: Array[String])
extends UnsupportedOperationException(
SparkThrowableHelper.getMessage(errorClass, messageParameters)) with SparkThrowable {
override def getErrorClass: String = errorClass
override def getSqlState: String = SparkThrowableHelper.getSqlState(errorClass)
}
/**
* Class not found exception thrown from Spark with an error class.
*/
@@ -139,6 +152,19 @@ private[spark] class SparkFileAlreadyExistsException(
override def getSqlState: String = SparkThrowableHelper.getSqlState(errorClass)
}
/**
* Illegal state exception thrown from Spark with an error class.
*/
private[spark] class SparkIllegalStateException(
errorClass: String,
messageParameters: Array[String])
extends IllegalStateException(
SparkThrowableHelper.getMessage(errorClass, messageParameters)) with SparkThrowable {
override def getErrorClass: String = errorClass
override def getSqlState: String = SparkThrowableHelper.getSqlState(errorClass)
}
/**
* File not found exception thrown from Spark with an error class.
*/
@@ -152,6 +178,19 @@ private[spark] class SparkFileNotFoundException(
override def getSqlState: String = SparkThrowableHelper.getSqlState(errorClass)
}
/**
* Number format exception thrown from Spark with an error class.
*/
private[spark] class SparkNumberFormatException(
errorClass: String,
messageParameters: Array[String])
extends NumberFormatException(
SparkThrowableHelper.getMessage(errorClass, messageParameters)) with SparkThrowable {
override def getErrorClass: String = errorClass
override def getSqlState: String = SparkThrowableHelper.getSqlState(errorClass)
}
/**
* No such method exception thrown from Spark with an error class.
*/
@@ -165,6 +204,19 @@ private[spark] class SparkNoSuchMethodException(
override def getSqlState: String = SparkThrowableHelper.getSqlState(errorClass)
}
/**
* Illegal argument exception thrown from Spark with an error class.
*/
private[spark] class SparkIllegalArgumentException(
errorClass: String,
messageParameters: Array[String])
extends IllegalArgumentException(
SparkThrowableHelper.getMessage(errorClass, messageParameters)) with SparkThrowable {
override def getErrorClass: String = errorClass
override def getSqlState: String = SparkThrowableHelper.getSqlState(errorClass)
}
/**
* Index out of bounds exception thrown from Spark with an error class.
*/
@@ -215,6 +267,19 @@ private[spark] class SparkSecurityException(
override def getSqlState: String = SparkThrowableHelper.getSqlState(errorClass)
}
/**
* Array index out of bounds exception thrown from Spark with an error class.
*/
private[spark] class SparkArrayIndexOutOfBoundsException(
errorClass: String,
messageParameters: Array[String])
extends ArrayIndexOutOfBoundsException(
SparkThrowableHelper.getMessage(errorClass, messageParameters)) with SparkThrowable {
override def getErrorClass: String = errorClass
override def getSqlState: String = SparkThrowableHelper.getSqlState(errorClass)
}
/**
* SQL exception thrown from Spark with an error class.
*/
@@ -228,6 +293,19 @@ private[spark] class SparkSQLException(
override def getSqlState: String = SparkThrowableHelper.getSqlState(errorClass)
}
/**
* No such element exception thrown from Spark with an error class.
*/
private[spark] class SparkNoSuchElementException(
errorClass: String,
messageParameters: Array[String])
extends NoSuchElementException(
SparkThrowableHelper.getMessage(errorClass, messageParameters)) with SparkThrowable {
override def getErrorClass: String = errorClass
override def getSqlState: String = SparkThrowableHelper.getSqlState(errorClass)
}
/**
* SQL feature not supported exception thrown from Spark with an error class.
*/
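Since every one of these wrappers mixes in `SparkThrowable`, callers can branch on that single interface instead of the dozen underlying JVM exception types. A hedged sketch of what that buys a consumer (assumes a Spark jar of this vintage on the classpath; `describe` is a hypothetical helper):

```scala
import org.apache.spark.SparkThrowable

// Classify any throwable by its Spark error class and SQLSTATE when available.
def describe(t: Throwable): String = t match {
  case st: SparkThrowable => s"errorClass=${st.getErrorClass}, sqlState=${st.getSqlState}"
  case other => s"unclassified: ${other.getClass.getName}"
}
```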


@@ -32,7 +32,7 @@ import org.apache.hadoop.fs.permission.FsPermission
import org.codehaus.commons.compiler.CompileException
import org.codehaus.janino.InternalCompilerException
import org.apache.spark.{Partition, SparkArithmeticException, SparkClassNotFoundException, SparkConcurrentModificationException, SparkDateTimeException, SparkException, SparkFileAlreadyExistsException, SparkFileNotFoundException, SparkIndexOutOfBoundsException, SparkNoSuchMethodException, SparkRuntimeException, SparkSecurityException, SparkSQLException, SparkSQLFeatureNotSupportedException, SparkUpgradeException}
import org.apache.spark.{Partition, SparkArithmeticException, SparkArrayIndexOutOfBoundsException, SparkClassNotFoundException, SparkConcurrentModificationException, SparkDateTimeException, SparkException, SparkFileAlreadyExistsException, SparkFileNotFoundException, SparkIllegalArgumentException, SparkIllegalStateException, SparkIndexOutOfBoundsException, SparkNoSuchElementException, SparkNoSuchMethodException, SparkNumberFormatException, SparkRuntimeException, SparkSecurityException, SparkSQLException, SparkSQLFeatureNotSupportedException, SparkUnsupportedOperationException, SparkUpgradeException}
import org.apache.spark.executor.CommitDeniedException
import org.apache.spark.launcher.SparkLauncher
import org.apache.spark.memory.SparkOutOfMemoryError
@@ -67,73 +67,87 @@ import org.apache.spark.util.CircularBuffer
object QueryExecutionErrors {
def columnChangeUnsupportedError(): Throwable = {
new UnsupportedOperationException("Please add an implementation for a column change here")
new SparkUnsupportedOperationException(errorClass = "UNSUPPORTED_CHANGE_COLUMN",
messageParameters = Array.empty)
}
def logicalHintOperatorNotRemovedDuringAnalysisError(): Throwable = {
new IllegalStateException(
"Internal error: logical hint operator should have been removed during analysis")
new SparkIllegalStateException(errorClass = "LOGICAL_HINT_OPERATOR_NOT_REMOVED_DURING_ANALYSIS",
messageParameters = Array.empty)
}
def cannotEvaluateExpressionError(expression: Expression): Throwable = {
new UnsupportedOperationException(s"Cannot evaluate expression: $expression")
new SparkUnsupportedOperationException(errorClass = "CANNOT_EVALUATE_EXPRESSION",
messageParameters = Array("", expression.toString))
}
def cannotGenerateCodeForExpressionError(expression: Expression): Throwable = {
new UnsupportedOperationException(s"Cannot generate code for expression: $expression")
new SparkUnsupportedOperationException(errorClass = "CANNOT_GENERATE_CODE_FOR_EXPRESSION",
messageParameters = Array(expression.toString))
}
def cannotTerminateGeneratorError(generator: UnresolvedGenerator): Throwable = {
new UnsupportedOperationException(s"Cannot terminate expression: $generator")
new SparkUnsupportedOperationException(errorClass = "CANNOT_TERMINATE_GENERATOR",
messageParameters = Array(generator.toString))
}
def castingCauseOverflowError(t: Any, targetType: String): ArithmeticException = {
new ArithmeticException(s"Casting $t to $targetType causes overflow")
new SparkArithmeticException(errorClass = "CAST_CAUSES_OVERFLOW",
messageParameters = Array(t.toString, targetType))
}
def cannotChangeDecimalPrecisionError(
value: Decimal, decimalPrecision: Int, decimalScale: Int): ArithmeticException = {
new ArithmeticException(s"${value.toDebugString} cannot be represented as " +
s"Decimal($decimalPrecision, $decimalScale).")
new SparkArithmeticException(errorClass = "CANNOT_CHANGE_DECIMAL_PRECISION",
messageParameters = Array(value.toDebugString,
decimalPrecision.toString, decimalScale.toString))
}
def invalidInputSyntaxForNumericError(s: UTF8String): NumberFormatException = {
new NumberFormatException(s"invalid input syntax for type numeric: $s")
new SparkNumberFormatException(errorClass = "INVALID_INPUT_SYNTAX_FOR_NUMERIC_TYPE",
messageParameters = Array(s.toString))
}
def cannotCastFromNullTypeError(to: DataType): Throwable = {
new SparkException(s"should not directly cast from NullType to $to.")
new SparkException(errorClass = "CANNOT_CAST_DATATYPE",
messageParameters = Array(NullType.typeName, to.typeName), null)
}
def cannotCastError(from: DataType, to: DataType): Throwable = {
new SparkException(s"Cannot cast $from to $to.")
new SparkException(errorClass = "CANNOT_CAST_DATATYPE",
messageParameters = Array(from.typeName, to.typeName), null)
}
def cannotParseDecimalError(): Throwable = {
new IllegalArgumentException("Cannot parse any decimal")
new SparkIllegalStateException(errorClass = "CANNOT_PARSE_DECIMAL",
messageParameters = Array.empty)
}
def simpleStringWithNodeIdUnsupportedError(nodeName: String): Throwable = {
new UnsupportedOperationException(s"$nodeName does not implement simpleStringWithNodeId")
new SparkUnsupportedOperationException(errorClass = "UNSUPPORTED_SIMPLE_STRING_WITH_NODE_ID",
messageParameters = Array(nodeName))
}
def evaluateUnevaluableAggregateUnsupportedError(
methodName: String, unEvaluable: UnevaluableAggregate): Throwable = {
new UnsupportedOperationException(s"Cannot evaluate $methodName: $unEvaluable")
new SparkUnsupportedOperationException(errorClass = "CANNOT_EVALUATE_EXPRESSION",
messageParameters = Array(methodName + ": " + unEvaluable.toString))
}
def dataTypeUnsupportedError(dt: DataType): Throwable = {
new SparkException(s"Unsupported data type $dt")
new SparkException(errorClass = "UNSUPPORTED_DATATYPE",
messageParameters = Array(dt.typeName), null)
}
def dataTypeUnsupportedError(dataType: String, failure: String): Throwable = {
new IllegalArgumentException(s"Unsupported dataType: $dataType, $failure")
new SparkIllegalArgumentException(errorClass = "UNSUPPORTED_DATATYPE",
messageParameters = Array(dataType + failure))
}
def failedExecuteUserDefinedFunctionError(funcCls: String, inputTypes: String,
outputType: String, e: Throwable): Throwable = {
new SparkException(
s"Failed to execute user defined function ($funcCls: ($inputTypes) => $outputType)", e)
new SparkException(errorClass = "FAILED_EXECUTE_UDF",
messageParameters = Array(funcCls, inputTypes, outputType), e)
}
def divideByZeroError(): ArithmeticException = {
@@ -141,15 +155,18 @@ object QueryExecutionErrors {
}
def invalidArrayIndexError(index: Int, numElements: Int): ArrayIndexOutOfBoundsException = {
new ArrayIndexOutOfBoundsException(s"Invalid index: $index, numElements: $numElements")
new SparkArrayIndexOutOfBoundsException(errorClass = "INVALID_ARRAY_INDEX",
messageParameters = Array(index.toString, numElements.toString))
}
def mapKeyNotExistError(key: Any): NoSuchElementException = {
new NoSuchElementException(s"Key $key does not exist.")
new SparkNoSuchElementException(errorClass = "MAP_KEY_DOES_NOT_EXIST",
messageParameters = Array(key.toString))
}
def rowFromCSVParserNotExpectedError(): Throwable = {
new IllegalArgumentException("Expected one row from CSV parser.")
new SparkIllegalArgumentException(errorClass = "ROW_FROM_CSV_PARSER_NOT_EXPECTED",
messageParameters = Array.empty)
}
def inputTypeUnsupportedError(dataType: DataType): Throwable = {

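For an end-to-end view, here is a hedged, spark-shell-style sketch of how one of the refactored errors now surfaces to user code. It assumes `spark.sql.ansi.enabled` is on (otherwise the cast returns null instead of overflowing) and mirrors the `CAST(... AS int)` golden-file change at the end of this diff:

```scala
import org.apache.spark.SparkArithmeticException
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[1]").getOrCreate()
spark.conf.set("spark.sql.ansi.enabled", "true") // make overflowing casts throw

try {
  spark.sql("SELECT CAST(4567890123456789 AS int)").collect()
} catch {
  case e: SparkArithmeticException =>
    // The exception now carries a machine-readable error class.
    println(s"${e.getErrorClass}: ${e.getMessage}")
    // CAST_CAUSES_OVERFLOW: Casting 4567890123456789 to int causes overflow
}
```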

@@ -24,6 +24,7 @@ import java.util.Arrays
import scala.collection.mutable.ArrayBuffer
import scala.reflect.runtime.universe.TypeTag
import org.apache.spark.SparkArithmeticException
import org.apache.spark.sql.{Encoder, Encoders}
import org.apache.spark.sql.catalyst.{FooClassWithEnum, FooEnum, OptionalData, PrimitiveData}
import org.apache.spark.sql.catalyst.analysis.AnalysisTest
@@ -614,7 +615,7 @@ class ExpressionEncoderSuite extends CodegenInterpretedPlanTest with AnalysisTes
toRow(bigNumeric)
}
assert(e.getMessage.contains("Error while encoding"))
assert(e.getCause.getClass === classOf[ArithmeticException])
assert(e.getCause.getClass === classOf[SparkArithmeticException])
}
}
}


@@ -167,7 +167,7 @@ select element_at(array(1, 2, 3), 5)
-- !query schema
struct<>
-- !query output
java.lang.ArrayIndexOutOfBoundsException
org.apache.spark.SparkArrayIndexOutOfBoundsException
Invalid index: 5, numElements: 3
@@ -176,7 +176,7 @@ select element_at(array(1, 2, 3), -5)
-- !query schema
struct<>
-- !query output
java.lang.ArrayIndexOutOfBoundsException
org.apache.spark.SparkArrayIndexOutOfBoundsException
Invalid index: -5, numElements: 3
@@ -194,7 +194,7 @@ select elt(4, '123', '456')
-- !query schema
struct<>
-- !query output
java.lang.ArrayIndexOutOfBoundsException
org.apache.spark.SparkArrayIndexOutOfBoundsException
Invalid index: 4, numElements: 2
@@ -203,7 +203,7 @@ select elt(0, '123', '456')
-- !query schema
struct<>
-- !query output
java.lang.ArrayIndexOutOfBoundsException
org.apache.spark.SparkArrayIndexOutOfBoundsException
Invalid index: 0, numElements: 2
@@ -212,7 +212,7 @@ select elt(-1, '123', '456')
-- !query schema
struct<>
-- !query output
java.lang.ArrayIndexOutOfBoundsException
org.apache.spark.SparkArrayIndexOutOfBoundsException
Invalid index: -1, numElements: 2
@@ -221,7 +221,7 @@ select array(1, 2, 3)[5]
-- !query schema
struct<>
-- !query output
java.lang.ArrayIndexOutOfBoundsException
org.apache.spark.SparkArrayIndexOutOfBoundsException
Invalid index: 5, numElements: 3
@@ -230,5 +230,5 @@ select array(1, 2, 3)[-1]
-- !query schema
struct<>
-- !query output
java.lang.ArrayIndexOutOfBoundsException
org.apache.spark.SparkArrayIndexOutOfBoundsException
Invalid index: -1, numElements: 3


@@ -75,7 +75,7 @@ select (5e36BD + 0.1) + 5e36BD
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
org.apache.spark.SparkArithmeticException
Decimal(expanded,10000000000000000000000000000000000000.1,39,1}) cannot be represented as Decimal(38, 1).
@@ -84,7 +84,7 @@ select (-4e36BD - 0.1) - 7e36BD
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
org.apache.spark.SparkArithmeticException
Decimal(expanded,-11000000000000000000000000000000000000.1,39,1}) cannot be represented as Decimal(38, 1).
@@ -93,7 +93,7 @@ select 12345678901234567890.0 * 12345678901234567890.0
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
org.apache.spark.SparkArithmeticException
Decimal(expanded,152415787532388367501905199875019052100,39,0}) cannot be represented as Decimal(38, 2).
@@ -102,7 +102,7 @@ select 1e35BD / 0.1
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
org.apache.spark.SparkArithmeticException
Decimal(expanded,1000000000000000000000000000000000000,37,0}) cannot be represented as Decimal(38, 6).


@@ -121,7 +121,7 @@ select interval 2 second * 'a'
-- !query schema
struct<>
-- !query output
java.lang.NumberFormatException
org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: a
@@ -130,7 +130,7 @@ select interval 2 second / 'a'
-- !query schema
struct<>
-- !query output
java.lang.NumberFormatException
org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: a
@@ -139,7 +139,7 @@ select interval 2 year * 'a'
-- !query schema
struct<>
-- !query output
java.lang.NumberFormatException
org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: a
@@ -148,7 +148,7 @@ select interval 2 year / 'a'
-- !query schema
struct<>
-- !query output
java.lang.NumberFormatException
org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: a
@@ -173,7 +173,7 @@ select 'a' * interval 2 second
-- !query schema
struct<>
-- !query output
java.lang.NumberFormatException
org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: a
@@ -182,7 +182,7 @@ select 'a' * interval 2 year
-- !query schema
struct<>
-- !query output
java.lang.NumberFormatException
org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: a


@@ -7,7 +7,7 @@ select element_at(map(1, 'a', 2, 'b'), 5)
-- !query schema
struct<>
-- !query output
java.util.NoSuchElementException
org.apache.spark.SparkNoSuchElementException
Key 5 does not exist.
@@ -16,5 +16,5 @@ select map(1, 'a', 2, 'b')[5]
-- !query schema
struct<>
-- !query output
java.util.NoSuchElementException
org.apache.spark.SparkNoSuchElementException
Key 5 does not exist.


@@ -95,7 +95,7 @@ SELECT float('N A N')
-- !query schema
struct<>
-- !query output
java.lang.NumberFormatException
org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: N A N
@@ -104,7 +104,7 @@ SELECT float('NaN x')
-- !query schema
struct<>
-- !query output
java.lang.NumberFormatException
org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: NaN x
@@ -113,7 +113,7 @@ SELECT float(' INFINITY x')
-- !query schema
struct<>
-- !query output
java.lang.NumberFormatException
org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: INFINITY x
@@ -146,7 +146,7 @@ SELECT float(decimal('nan'))
-- !query schema
struct<>
-- !query output
java.lang.NumberFormatException
org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: nan
@@ -324,7 +324,7 @@ SELECT int(float('2147483647'))
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
org.apache.spark.SparkArithmeticException
Casting 2.14748365E9 to int causes overflow
@@ -341,7 +341,7 @@ SELECT int(float('-2147483900'))
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
org.apache.spark.SparkArithmeticException
Casting -2.1474839E9 to int causes overflow
@@ -374,7 +374,7 @@ SELECT bigint(float('-9223380000000000000'))
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
org.apache.spark.SparkArithmeticException
Casting -9.22338E18 to int causes overflow


@@ -127,7 +127,7 @@ SELECT double('N A N')
-- !query schema
struct<>
-- !query output
java.lang.NumberFormatException
org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: N A N
@@ -136,7 +136,7 @@ SELECT double('NaN x')
-- !query schema
struct<>
-- !query output
java.lang.NumberFormatException
org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: NaN x
@@ -145,7 +145,7 @@ SELECT double(' INFINITY x')
-- !query schema
struct<>
-- !query output
java.lang.NumberFormatException
org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: INFINITY x
@@ -178,7 +178,7 @@ SELECT double(decimal('nan'))
-- !query schema
struct<>
-- !query output
java.lang.NumberFormatException
org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: nan
@@ -832,7 +832,7 @@ SELECT bigint(double('-9223372036854780000'))
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
org.apache.spark.SparkArithmeticException
Casting -9.22337203685478E18 to long causes overflow


@@ -606,7 +606,7 @@ SELECT CAST(q1 AS int) FROM int8_tbl WHERE q2 <> 456
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
org.apache.spark.SparkArithmeticException
Casting 4567890123456789 to int causes overflow
@@ -623,7 +623,7 @@ SELECT CAST(q1 AS smallint) FROM int8_tbl WHERE q2 <> 456
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
org.apache.spark.SparkArithmeticException
Casting 4567890123456789 to smallint causes overflow
@@ -660,7 +660,7 @@ SELECT CAST(double('922337203685477580700.0') AS bigint)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
org.apache.spark.SparkArithmeticException
Casting 9.223372036854776E20 to long causes overflow
@@ -732,7 +732,7 @@ SELECT string(int(shiftleft(bigint(-1), 63))+1)
-- !query schema
struct<>
-- !query output
java.lang.ArithmeticException
org.apache.spark.SparkArithmeticException
Casting -9223372036854775808 to int causes overflow