[SPARK-35060][SQL] Group exception messages in sql/types
### What changes were proposed in this pull request? This PR groups exception messages in `sql/catalyst/src/main/scala/org/apache/spark/sql/types`. ### Why are the changes needed? It will greatly help with the standardization of error messages and their maintenance. ### Does this PR introduce _any_ user-facing change? No. Error messages remain unchanged. ### How was this patch tested? No new tests - passes all original tests to make sure it doesn't break any existing behavior. Closes #32244 from beliefer/SPARK-35060. Lead-authored-by: beliefer <beliefer@163.com> Co-authored-by: gengjiaan <gengjiaan@360.cn> Signed-off-by: Wenchen Fan <wenchen@databricks.com>
This commit is contained in:
parent
f0090463a8
commit
1b609c7dcf
|
@ -1352,4 +1352,9 @@ private[spark] object QueryCompilationErrors {
|
|||
s"Expected udfs have the same evalType but got different evalTypes: " +
|
||||
s"${evalTypes.mkString(",")}")
|
||||
}
|
||||
|
||||
/**
 * Error for a field name that matches more than one column.
 *
 * @param fieldName the ambiguous field name as written by the user
 * @param names rendered list of the candidate columns that matched
 */
def ambiguousFieldNameError(fieldName: String, names: String): Throwable =
  new AnalysisException(
    s"Ambiguous field name: $fieldName. Found multiple columns that can match: $names")
|
||||
}
|
||||
|
|
|
@ -309,7 +309,7 @@ object QueryExecutionErrors {
|
|||
new IllegalStateException("table stats must be specified.")
|
||||
}
|
||||
|
||||
/**
 * Error for unary minus applied to a value whose negation overflows
 * (e.g. negating the minimum value of a fixed-width integral type).
 *
 * @param originValue the value whose negation overflowed
 */
def unaryMinusCauseOverflowError(originValue: AnyVal): ArithmeticException =
  new ArithmeticException(s"- $originValue caused overflow.")
|
||||
|
||||
|
@ -772,4 +772,55 @@ object QueryExecutionErrors {
|
|||
new IllegalArgumentException(s"Unexpected: $o")
|
||||
}
|
||||
|
||||
/** Error for a Decimal unscaled value that does not fit in the requested precision. */
def unscaledValueTooLargeForPrecisionError(): Throwable =
  new ArithmeticException("Unscaled value too large for precision")
|
||||
|
||||
/**
 * Error for a decimal value whose precision is larger than the allowed maximum.
 *
 * @param precision the actual precision of the decimal value
 * @param maxPrecision the maximum precision permitted
 */
def decimalPrecisionExceedsMaxPrecisionError(precision: Int, maxPrecision: Int): Throwable = {
  val message = s"Decimal precision $precision exceeds max precision $maxPrecision"
  new ArithmeticException(message)
}
|
||||
|
||||
/**
 * Error for an input literal that parses to a number outside the representable
 * decimal type range.
 *
 * @param str the original input text
 */
def outOfDecimalTypeRangeError(str: UTF8String): Throwable =
  new ArithmeticException(s"out of decimal type range: $str")
|
||||
|
||||
/**
 * Error for an array element class that is not supported.
 *
 * @param clazz the runtime class of the unsupported array element
 */
def unsupportedArrayTypeError(clazz: Class[_]): Throwable =
  new RuntimeException(s"Do not support array of type $clazz.")
|
||||
|
||||
/**
 * Error for a Java value class that is not supported.
 *
 * @param clazz the runtime class of the unsupported value
 */
def unsupportedJavaTypeError(clazz: Class[_]): Throwable =
  new RuntimeException(s"Do not support type $clazz.")
|
||||
|
||||
/**
 * Error for a string that could not be parsed as a struct type.
 *
 * @param raw the raw input that failed to parse
 */
def failedParsingStructTypeError(raw: String): Throwable =
  new RuntimeException(s"Failed parsing ${StructType.simpleString}: $raw")
|
||||
|
||||
/**
 * Error raised when two struct fields cannot be merged.
 *
 * @param leftName name of the field on the left side of the merge
 * @param rightName name of the field on the right side of the merge
 * @param e the underlying failure; its message is appended to the error text
 */
def failedMergingFieldsError(leftName: String, rightName: String, e: Throwable): Throwable =
  new SparkException(s"Failed to merge fields '$leftName' and '$rightName'. ${e.getMessage}")
|
||||
|
||||
/**
 * Error for merging two decimal types that differ in both precision and scale.
 *
 * @param leftPrecision precision of the left-hand decimal type
 * @param rightPrecision precision of the right-hand decimal type
 * @param leftScale scale of the left-hand decimal type
 * @param rightScale scale of the right-hand decimal type
 */
def cannotMergeDecimalTypesWithIncompatiblePrecisionAndScaleError(
    leftPrecision: Int, rightPrecision: Int, leftScale: Int, rightScale: Int): Throwable = {
  val message = "Failed to merge decimal types with incompatible " +
    s"precision $leftPrecision and $rightPrecision & scale $leftScale and $rightScale"
  new SparkException(message)
}
|
||||
|
||||
/**
 * Error for merging two decimal types whose precisions differ.
 *
 * @param leftPrecision precision of the left-hand decimal type
 * @param rightPrecision precision of the right-hand decimal type
 */
def cannotMergeDecimalTypesWithIncompatiblePrecisionError(
    leftPrecision: Int, rightPrecision: Int): Throwable = {
  val message = "Failed to merge decimal types with incompatible " +
    s"precision $leftPrecision and $rightPrecision"
  new SparkException(message)
}
|
||||
|
||||
/**
 * Error for merging two decimal types whose scales differ.
 *
 * @param leftScale scale of the left-hand decimal type
 * @param rightScale scale of the right-hand decimal type
 */
def cannotMergeDecimalTypesWithIncompatibleScaleError(
    leftScale: Int, rightScale: Int): Throwable = {
  // Fixed typo in the message: it previously said "scala" instead of "scale".
  new SparkException("Failed to merge decimal types with incompatible " +
    s"scale $leftScale and $rightScale")
}
|
||||
|
||||
/**
 * Error for merging two data types that cannot be reconciled.
 *
 * @param left the left-hand data type
 * @param right the right-hand data type
 */
def cannotMergeIncompatibleDataTypesError(left: DataType, right: DataType): Throwable =
  new SparkException(s"Failed to merge incompatible data types ${left.catalogString}" +
    s" and ${right.catalogString}")
|
||||
}
|
||||
|
|
|
@ -22,6 +22,7 @@ import java.math.{BigDecimal => JavaBigDecimal, BigInteger, MathContext, Roundin
|
|||
import scala.util.Try
|
||||
|
||||
import org.apache.spark.annotation.Unstable
|
||||
import org.apache.spark.sql.errors.QueryExecutionErrors
|
||||
import org.apache.spark.sql.internal.SQLConf
|
||||
import org.apache.spark.unsafe.types.UTF8String
|
||||
|
||||
|
@ -80,7 +81,7 @@ final class Decimal extends Ordered[Decimal] with Serializable {
|
|||
*/
|
||||
/**
 * Set this Decimal to the given unscaled Long, with a given precision and scale.
 * Throws via QueryExecutionErrors if the unscaled value cannot be represented
 * with the requested precision.
 */
def set(unscaled: Long, precision: Int, scale: Int): Decimal = {
  // setOrNull signals overflow by returning null; surface that as an error here.
  if (setOrNull(unscaled, precision, scale) == null) {
    throw QueryExecutionErrors.unscaledValueTooLargeForPrecisionError()
  }
  this
}
|
||||
|
@ -118,8 +119,8 @@ final class Decimal extends Ordered[Decimal] with Serializable {
|
|||
DecimalType.checkNegativeScale(scale)
|
||||
this.decimalVal = decimal.setScale(scale, ROUND_HALF_UP)
|
||||
if (decimalVal.precision > precision) {
|
||||
throw new ArithmeticException(
|
||||
s"Decimal precision ${decimalVal.precision} exceeds max precision $precision")
|
||||
throw QueryExecutionErrors.decimalPrecisionExceedsMaxPrecisionError(
|
||||
decimalVal.precision, precision)
|
||||
}
|
||||
this.longVal = 0L
|
||||
this._precision = precision
|
||||
|
@ -251,7 +252,7 @@ final class Decimal extends Ordered[Decimal] with Serializable {
|
|||
def toByte: Byte = toLong.toByte
|
||||
|
||||
private def overflowException(dataType: String) =
|
||||
throw new ArithmeticException(s"Casting $this to $dataType causes overflow")
|
||||
throw QueryExecutionErrors.castingCauseOverflowError(this, dataType)
|
||||
|
||||
/**
|
||||
* @return the Byte value that is equal to the rounded decimal.
|
||||
|
@ -263,14 +264,14 @@ final class Decimal extends Ordered[Decimal] with Serializable {
|
|||
if (actualLongVal == actualLongVal.toByte) {
|
||||
actualLongVal.toByte
|
||||
} else {
|
||||
overflowException("byte")
|
||||
throw QueryExecutionErrors.castingCauseOverflowError(this, "byte")
|
||||
}
|
||||
} else {
|
||||
val doubleVal = decimalVal.toDouble
|
||||
if (Math.floor(doubleVal) <= Byte.MaxValue && Math.ceil(doubleVal) >= Byte.MinValue) {
|
||||
doubleVal.toByte
|
||||
} else {
|
||||
overflowException("byte")
|
||||
throw QueryExecutionErrors.castingCauseOverflowError(this, "byte")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -285,14 +286,14 @@ final class Decimal extends Ordered[Decimal] with Serializable {
|
|||
if (actualLongVal == actualLongVal.toShort) {
|
||||
actualLongVal.toShort
|
||||
} else {
|
||||
overflowException("short")
|
||||
throw QueryExecutionErrors.castingCauseOverflowError(this, "short")
|
||||
}
|
||||
} else {
|
||||
val doubleVal = decimalVal.toDouble
|
||||
if (Math.floor(doubleVal) <= Short.MaxValue && Math.ceil(doubleVal) >= Short.MinValue) {
|
||||
doubleVal.toShort
|
||||
} else {
|
||||
overflowException("short")
|
||||
throw QueryExecutionErrors.castingCauseOverflowError(this, "short")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -307,14 +308,14 @@ final class Decimal extends Ordered[Decimal] with Serializable {
|
|||
if (actualLongVal == actualLongVal.toInt) {
|
||||
actualLongVal.toInt
|
||||
} else {
|
||||
overflowException("int")
|
||||
throw QueryExecutionErrors.castingCauseOverflowError(this, "int")
|
||||
}
|
||||
} else {
|
||||
val doubleVal = decimalVal.toDouble
|
||||
if (Math.floor(doubleVal) <= Int.MaxValue && Math.ceil(doubleVal) >= Int.MinValue) {
|
||||
doubleVal.toInt
|
||||
} else {
|
||||
overflowException("int")
|
||||
throw QueryExecutionErrors.castingCauseOverflowError(this, "int")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -333,7 +334,8 @@ final class Decimal extends Ordered[Decimal] with Serializable {
|
|||
// `longValueExact` to make sure the range check is accurate.
|
||||
decimalVal.bigDecimal.toBigInteger.longValueExact()
|
||||
} catch {
|
||||
case _: ArithmeticException => overflowException("long")
|
||||
case _: ArithmeticException =>
|
||||
throw QueryExecutionErrors.castingCauseOverflowError(this, "long")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -365,8 +367,7 @@ final class Decimal extends Ordered[Decimal] with Serializable {
|
|||
if (nullOnOverflow) {
|
||||
null
|
||||
} else {
|
||||
throw new ArithmeticException(
|
||||
s"$toDebugString cannot be represented as Decimal($precision, $scale).")
|
||||
throw QueryExecutionErrors.cannotChangeDecimalPrecisionError(this, precision, scale)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -622,13 +623,13 @@ object Decimal {
|
|||
// We fast fail because constructing a very large JavaBigDecimal to Decimal is very slow.
|
||||
// For example: Decimal("6.0790316E+25569151")
|
||||
if (calculatePrecision(bigDecimal) > DecimalType.MAX_PRECISION) {
|
||||
throw new ArithmeticException(s"out of decimal type range: $str")
|
||||
throw QueryExecutionErrors.outOfDecimalTypeRangeError(str)
|
||||
} else {
|
||||
Decimal(bigDecimal)
|
||||
}
|
||||
} catch {
|
||||
case _: NumberFormatException =>
|
||||
throw new NumberFormatException(s"invalid input syntax for type numeric: $str")
|
||||
throw QueryExecutionErrors.invalidInputSyntaxForNumericError(str)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -23,6 +23,7 @@ import org.json4s._
|
|||
import org.json4s.jackson.JsonMethods._
|
||||
|
||||
import org.apache.spark.annotation.Stable
|
||||
import org.apache.spark.sql.errors.QueryExecutionErrors
|
||||
|
||||
|
||||
/**
|
||||
|
@ -162,13 +163,13 @@ object Metadata {
|
|||
builder.putMetadataArray(
|
||||
key, value.asInstanceOf[List[JObject]].map(fromJObject).toArray)
|
||||
case other =>
|
||||
throw new RuntimeException(s"Do not support array of type ${other.getClass}.")
|
||||
throw QueryExecutionErrors.unsupportedArrayTypeError(other.getClass)
|
||||
}
|
||||
}
|
||||
case (key, JNull) =>
|
||||
builder.putNull(key)
|
||||
case (key, other) =>
|
||||
throw new RuntimeException(s"Do not support type ${other.getClass}.")
|
||||
throw QueryExecutionErrors.unsupportedJavaTypeError(other.getClass)
|
||||
}
|
||||
builder.build()
|
||||
}
|
||||
|
@ -195,7 +196,7 @@ object Metadata {
|
|||
case x: Metadata =>
|
||||
toJsonValue(x.map)
|
||||
case other =>
|
||||
throw new RuntimeException(s"Do not support type ${other.getClass}.")
|
||||
throw QueryExecutionErrors.unsupportedJavaTypeError(other.getClass)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -222,7 +223,7 @@ object Metadata {
|
|||
case null =>
|
||||
0
|
||||
case other =>
|
||||
throw new RuntimeException(s"Do not support type ${other.getClass}.")
|
||||
throw QueryExecutionErrors.unsupportedJavaTypeError(other.getClass)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -23,14 +23,13 @@ import scala.util.control.NonFatal
|
|||
|
||||
import org.json4s.JsonDSL._
|
||||
|
||||
import org.apache.spark.SparkException
|
||||
import org.apache.spark.annotation.Stable
|
||||
import org.apache.spark.sql.AnalysisException
|
||||
import org.apache.spark.sql.catalyst.analysis.Resolver
|
||||
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, InterpretedOrdering}
|
||||
import org.apache.spark.sql.catalyst.parser.{CatalystSqlParser, LegacyTypeStringParser}
|
||||
import org.apache.spark.sql.catalyst.util.{truncatedString, StringUtils}
|
||||
import org.apache.spark.sql.catalyst.util.StringUtils.StringConcat
|
||||
import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
|
||||
import org.apache.spark.sql.internal.SQLConf
|
||||
|
||||
/**
|
||||
|
@ -333,9 +332,8 @@ case class StructType(fields: Array[StructField]) extends DataType with Seq[Stru
|
|||
if (found.length > 1) {
|
||||
val names = found.map(f => prettyFieldName(normalizedPath :+ f.name))
|
||||
.mkString("[", ", ", " ]")
|
||||
throw new AnalysisException(
|
||||
s"Ambiguous field name: ${prettyFieldName(normalizedPath :+ searchName)}. Found " +
|
||||
s"multiple columns that can match: $names")
|
||||
throw QueryCompilationErrors.ambiguousFieldNameError(
|
||||
prettyFieldName(normalizedPath :+ searchName), names)
|
||||
} else if (found.isEmpty) {
|
||||
None
|
||||
} else {
|
||||
|
@ -523,7 +521,7 @@ object StructType extends AbstractDataType {
|
|||
/**
 * Parses a StructType from its JSON representation, falling back to the legacy
 * case-class string format for backward compatibility.
 *
 * @param raw the serialized struct type
 */
private[sql] def fromString(raw: String): StructType = {
  val parsed = Try(DataType.fromJson(raw)).getOrElse(LegacyTypeStringParser.parseString(raw))
  parsed match {
    case t: StructType => t
    case _ => throw QueryExecutionErrors.failedParsingStructTypeError(raw)
  }
}
|
||||
|
||||
|
@ -586,8 +584,7 @@ object StructType extends AbstractDataType {
|
|||
nullable = leftNullable || rightNullable)
|
||||
} catch {
|
||||
case NonFatal(e) =>
|
||||
throw new SparkException(s"Failed to merge fields '$leftName' and " +
|
||||
s"'$rightName'. " + e.getMessage)
|
||||
throw QueryExecutionErrors.failedMergingFieldsError(leftName, rightName, e)
|
||||
}
|
||||
}
|
||||
.orElse {
|
||||
|
@ -610,14 +607,14 @@ object StructType extends AbstractDataType {
|
|||
if ((leftPrecision == rightPrecision) && (leftScale == rightScale)) {
|
||||
DecimalType(leftPrecision, leftScale)
|
||||
} else if ((leftPrecision != rightPrecision) && (leftScale != rightScale)) {
|
||||
throw new SparkException("Failed to merge decimal types with incompatible " +
|
||||
s"precision $leftPrecision and $rightPrecision & scale $leftScale and $rightScale")
|
||||
throw QueryExecutionErrors.cannotMergeDecimalTypesWithIncompatiblePrecisionAndScaleError(
|
||||
leftPrecision, rightPrecision, leftScale, rightScale)
|
||||
} else if (leftPrecision != rightPrecision) {
|
||||
throw new SparkException("Failed to merge decimal types with incompatible " +
|
||||
s"precision $leftPrecision and $rightPrecision")
|
||||
throw QueryExecutionErrors.cannotMergeDecimalTypesWithIncompatiblePrecisionError(
|
||||
leftPrecision, rightPrecision)
|
||||
} else {
|
||||
throw new SparkException("Failed to merge decimal types with incompatible " +
|
||||
s"scala $leftScale and $rightScale")
|
||||
throw QueryExecutionErrors.cannotMergeDecimalTypesWithIncompatibleScaleError(
|
||||
leftScale, rightScale)
|
||||
}
|
||||
|
||||
case (leftUdt: UserDefinedType[_], rightUdt: UserDefinedType[_])
|
||||
|
@ -627,8 +624,7 @@ object StructType extends AbstractDataType {
|
|||
leftType
|
||||
|
||||
case _ =>
|
||||
throw new SparkException(s"Failed to merge incompatible data types ${left.catalogString}" +
|
||||
s" and ${right.catalogString}")
|
||||
throw QueryExecutionErrors.cannotMergeIncompatibleDataTypesError(left, right)
|
||||
}
|
||||
|
||||
private[sql] def fieldsMap(fields: Array[StructField]): Map[String, StructField] = {
|
||||
|
|
|
@ -21,12 +21,13 @@ import scala.math.Numeric._
|
|||
import scala.math.Ordering
|
||||
|
||||
import org.apache.spark.sql.catalyst.util.SQLOrderingUtil
|
||||
import org.apache.spark.sql.errors.QueryExecutionErrors
|
||||
import org.apache.spark.sql.types.Decimal.DecimalIsConflicted
|
||||
|
||||
private[sql] object ByteExactNumeric extends ByteIsIntegral with Ordering.ByteOrdering {
|
||||
// Raises an arithmetic overflow error when `res` — the Int-widened result of
// `x op y` — falls outside the representable Byte range.
private def checkOverflow(res: Int, x: Byte, y: Byte, op: String): Unit = {
  if (res < Byte.MinValue || res > Byte.MaxValue) {
    throw QueryExecutionErrors.binaryArithmeticCauseOverflowError(x, op, y)
  }
}
|
||||
|
||||
|
@ -50,7 +51,7 @@ private[sql] object ByteExactNumeric extends ByteIsIntegral with Ordering.ByteOr
|
|||
|
||||
// Negates `x` with an overflow check: Byte.MinValue is the only input whose
// negation is not representable as a Byte.
override def negate(x: Byte): Byte = {
  if (x == Byte.MinValue) {
    throw QueryExecutionErrors.unaryMinusCauseOverflowError(x)
  }
  (-x).toByte
}
|
||||
|
@ -60,7 +61,7 @@ private[sql] object ByteExactNumeric extends ByteIsIntegral with Ordering.ByteOr
|
|||
private[sql] object ShortExactNumeric extends ShortIsIntegral with Ordering.ShortOrdering {
|
||||
// Raises an arithmetic overflow error when `res` — the Int-widened result of
// `x op y` — falls outside the representable Short range.
private def checkOverflow(res: Int, x: Short, y: Short, op: String): Unit = {
  if (res < Short.MinValue || res > Short.MaxValue) {
    throw QueryExecutionErrors.binaryArithmeticCauseOverflowError(x, op, y)
  }
}
|
||||
|
||||
|
@ -84,7 +85,7 @@ private[sql] object ShortExactNumeric extends ShortIsIntegral with Ordering.Shor
|
|||
|
||||
// Negates `x` with an overflow check: Short.MinValue is the only input whose
// negation is not representable as a Short. (The original comment incorrectly
// said Byte.MinValue.)
override def negate(x: Short): Short = {
  if (x == Short.MinValue) {
    throw QueryExecutionErrors.unaryMinusCauseOverflowError(x)
  }
  (-x).toShort
}
|
||||
|
@ -114,14 +115,11 @@ private[sql] object LongExactNumeric extends LongIsIntegral with Ordering.LongOr
|
|||
if (x == x.toInt) {
|
||||
x.toInt
|
||||
} else {
|
||||
throw new ArithmeticException(s"Casting $x to int causes overflow")
|
||||
throw QueryExecutionErrors.castingCauseOverflowError(x, "int")
|
||||
}
|
||||
}
|
||||
|
||||
private[sql] object FloatExactNumeric extends FloatIsFractional {
|
||||
private def overflowException(x: Float, dataType: String) =
|
||||
throw new ArithmeticException(s"Casting $x to $dataType causes overflow")
|
||||
|
||||
private val intUpperBound = Int.MaxValue
|
||||
private val intLowerBound = Int.MinValue
|
||||
private val longUpperBound = Long.MaxValue
|
||||
|
@ -137,7 +135,7 @@ private[sql] object FloatExactNumeric extends FloatIsFractional {
|
|||
if (Math.floor(x) <= intUpperBound && Math.ceil(x) >= intLowerBound) {
|
||||
x.toInt
|
||||
} else {
|
||||
overflowException(x, "int")
|
||||
throw QueryExecutionErrors.castingCauseOverflowError(x, "int")
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -145,7 +143,7 @@ private[sql] object FloatExactNumeric extends FloatIsFractional {
|
|||
if (Math.floor(x) <= longUpperBound && Math.ceil(x) >= longLowerBound) {
|
||||
x.toLong
|
||||
} else {
|
||||
overflowException(x, "int")
|
||||
throw QueryExecutionErrors.castingCauseOverflowError(x, "int")
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -153,9 +151,6 @@ private[sql] object FloatExactNumeric extends FloatIsFractional {
|
|||
}
|
||||
|
||||
private[sql] object DoubleExactNumeric extends DoubleIsFractional {
|
||||
private def overflowException(x: Double, dataType: String) =
|
||||
throw new ArithmeticException(s"Casting $x to $dataType causes overflow")
|
||||
|
||||
private val intUpperBound = Int.MaxValue
|
||||
private val intLowerBound = Int.MinValue
|
||||
private val longUpperBound = Long.MaxValue
|
||||
|
@ -165,7 +160,7 @@ private[sql] object DoubleExactNumeric extends DoubleIsFractional {
|
|||
if (Math.floor(x) <= intUpperBound && Math.ceil(x) >= intLowerBound) {
|
||||
x.toInt
|
||||
} else {
|
||||
overflowException(x, "int")
|
||||
throw QueryExecutionErrors.castingCauseOverflowError(x, "int")
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -173,7 +168,7 @@ private[sql] object DoubleExactNumeric extends DoubleIsFractional {
|
|||
if (Math.floor(x) <= longUpperBound && Math.ceil(x) >= longLowerBound) {
|
||||
x.toLong
|
||||
} else {
|
||||
overflowException(x, "long")
|
||||
throw QueryExecutionErrors.castingCauseOverflowError(x, "long")
|
||||
}
|
||||
}
|
||||
|
||||
|
|
Loading…
Reference in a new issue