[SPARK-17246][SQL] Add BigDecimal literal

## What changes were proposed in this pull request?
This PR adds parser support for `BigDecimal` literals. If you append the suffix `BD` to a valid number, it is interpreted as a `BigDecimal`; for example, `12.0E10BD` is parsed into a `BigDecimal` with precision 3 and scale -9. This is useful in situations where you need exact values.
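For context, a minimal plain-Scala sketch (not part of the patch) showing how `java.math.BigDecimal` derives that precision and scale:

```scala
import java.math.{BigDecimal => JBigDecimal}

// "12.0E10" keeps three significant digits (unscaled value 120); the negative
// scale means the value is 120 * 10^9.
val bd = new JBigDecimal("12.0E10")
println(bd.precision)     // 3
println(bd.scale)         // -9
println(bd.toPlainString) // 120000000000
```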

## How was this patch tested?
Added tests to `ExpressionParserSuite`, `ExpressionSQLBuilderSuite` and `SQLQueryTestSuite`.

Author: Herman van Hovell <hvanhovell@databricks.com>

Closes #14819 from hvanhovell/SPARK-17246.
commit a11d10f182 (parent 8e5475be3c)
Authored by Herman van Hovell on 2016-08-26 13:29:22 -07:00, committed by Reynold Xin
7 changed files with 59 additions and 3 deletions

@@ -633,6 +633,7 @@ number
     | MINUS? SMALLINT_LITERAL #smallIntLiteral
     | MINUS? TINYINT_LITERAL #tinyIntLiteral
     | MINUS? DOUBLE_LITERAL #doubleLiteral
+    | MINUS? BIGDECIMAL_LITERAL #bigDecimalLiteral
     ;
 nonReserved
@@ -928,6 +929,11 @@ DOUBLE_LITERAL
     (INTEGER_VALUE | DECIMAL_VALUE | SCIENTIFIC_DECIMAL_VALUE) 'D'
     ;
 
+BIGDECIMAL_LITERAL
+    :
+    (INTEGER_VALUE | DECIMAL_VALUE | SCIENTIFIC_DECIMAL_VALUE) 'BD'
+    ;
+
 IDENTIFIER
     : (LETTER | DIGIT | '_')+
     ;

@@ -266,7 +266,7 @@ case class Literal (value: Any, dataType: DataType) extends LeafExpression with
       case Double.NegativeInfinity => s"CAST('-Infinity' AS ${DoubleType.sql})"
       case _ => v + "D"
     }
-    case (v: Decimal, t: DecimalType) => s"CAST($v AS ${t.sql})"
+    case (v: Decimal, t: DecimalType) => v + "BD"
     case (v: Int, DateType) => s"DATE '${DateTimeUtils.toJavaDate(v)}'"
     case (v: Long, TimestampType) => s"TIMESTAMP('${DateTimeUtils.toJavaTimestamp(v)}')"
     case _ => value.toString
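With this change, a decimal literal round-trips through `Literal.sql` as a `BD`-suffixed number rather than a `CAST` expression. A minimal sketch of the before/after output, assuming Spark's catalyst module on the classpath:

```scala
import org.apache.spark.sql.catalyst.expressions.Literal

val lit = Literal(BigDecimal("10.0000000").underlying) // DecimalType(9,7)
println(lit.sql)
// after this patch:  10.0000000BD
// before this patch: CAST(10.0000000 AS DECIMAL(9,7))
```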

@@ -26,7 +26,8 @@ import org.antlr.v4.runtime.{ParserRuleContext, Token}
 import org.antlr.v4.runtime.tree.{ParseTree, RuleNode, TerminalNode}
 import org.apache.spark.internal.Logging
-import org.apache.spark.sql.catalyst.{FunctionIdentifier, InternalRow, TableIdentifier}
+import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
 import org.apache.spark.sql.catalyst.analysis._
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.parser.SqlBaseParser._
@@ -1323,6 +1324,19 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
     numericLiteral(ctx, Double.MinValue, Double.MaxValue, DoubleType.simpleString)(_.toDouble)
   }
 
+  /**
+   * Create a BigDecimal Literal expression.
+   */
+  override def visitBigDecimalLiteral(ctx: BigDecimalLiteralContext): Literal = {
+    val raw = ctx.getText.substring(0, ctx.getText.length - 2)
+    try {
+      Literal(BigDecimal(raw).underlying())
+    } catch {
+      case e: AnalysisException =>
+        throw new ParseException(e.message, ctx)
+    }
+  }
+
   /**
    * Create a String literal expression.
    */
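The visitor strips the two-character `BD` suffix from the matched token and lets `Literal.apply` infer a `DecimalType` from the resulting `java.math.BigDecimal`; any `AnalysisException` (e.g. precision overflow) is surfaced as a `ParseException` at the literal's position. A standalone sketch of the same logic, using a hypothetical helper name:

```scala
import org.apache.spark.sql.catalyst.expressions.Literal

// Hypothetical helper mirroring visitBigDecimalLiteral's token handling.
def parseBigDecimalLiteral(token: String): Literal = {
  val raw = token.substring(0, token.length - 2) // drop the trailing "BD"
  // Literal.apply on a java.math.BigDecimal infers DecimalType(precision, scale)
  // and throws AnalysisException when the type cannot hold the value.
  Literal(BigDecimal(raw).underlying())
}

parseBigDecimalLiteral("123.08BD") // Literal with DecimalType(5,2)
```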

@@ -392,6 +392,13 @@ class ExpressionParserSuite extends PlanTest {
     intercept("1.8E308D", s"does not fit in range")
     // TODO we need to figure out if we should throw an exception here!
     assertEqual("1E309", Literal(Double.PositiveInfinity))
+
+    // BigDecimal Literal
+    assertEqual("90912830918230182310293801923652346786BD",
+      Literal(BigDecimal("90912830918230182310293801923652346786").underlying()))
+    assertEqual("123.0E-28BD", Literal(BigDecimal("123.0E-28").underlying()))
+    assertEqual("123.08BD", Literal(BigDecimal("123.08").underlying()))
+    intercept("1.20E-38BD", "DecimalType can only support precision up to 38")
   }
 
   test("strings") {
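The last case shows why `1.20E-38BD` is rejected: the literal has only 3 digits of precision but a scale of 40, so a decimal type wide enough to hold it (assuming `Literal.apply` sizes it as `DecimalType(max(precision, scale), scale)`, i.e. `DecimalType(40, 40)`) would exceed the maximum precision of 38. A plain-Scala check (not part of the patch):

```scala
import java.math.{BigDecimal => JBigDecimal}

val bd = new JBigDecimal("1.20E-38")
println(bd.precision) // 3  -- the unscaled value is 120
println(bd.scale)     // 40 -- "1.20" has scale 2, shifted right by 38
```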

@@ -90,3 +90,9 @@ select interval 10 nanoseconds;
 
 -- unsupported data type
 select GEO '(10,-6)';
+
+-- big decimal parsing
+select 90912830918230182310293801923652346786BD, 123.0E-28BD, 123.08BD;
+
+-- out of range big decimal
+select 1.20E-38BD;

@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 38
+-- Number of queries: 40
 
 -- !query 0
@@ -354,3 +354,25 @@ Literals of type 'GEO' are currently not supported.(line 1, pos 7)
 
 == SQL ==
 select GEO '(10,-6)'
 -------^^^
+
+
+-- !query 38
+select 90912830918230182310293801923652346786BD, 123.0E-28BD, 123.08BD
+-- !query 38 schema
+struct<90912830918230182310293801923652346786:decimal(38,0),1.230E-26:decimal(29,29),123.08:decimal(5,2)>
+-- !query 38 output
+90912830918230182310293801923652346786	0.0000000000000000000000000123	123.08
+
+
+-- !query 39
+select 1.20E-38BD
+-- !query 39 schema
+struct<>
+-- !query 39 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+DecimalType can only support precision up to 38(line 1, pos 7)
+
+== SQL ==
+select 1.20E-38BD
+-------^^^

@@ -39,6 +39,7 @@ class ExpressionSQLBuilderSuite extends SQLBuilderTest {
     checkSQL(Literal(Double.PositiveInfinity), "CAST('Infinity' AS DOUBLE)")
     checkSQL(Literal(Double.NegativeInfinity), "CAST('-Infinity' AS DOUBLE)")
     checkSQL(Literal(Double.NaN), "CAST('NaN' AS DOUBLE)")
+    checkSQL(Literal(BigDecimal("10.0000000").underlying), "10.0000000BD")
     checkSQL(
       Literal(Timestamp.valueOf("2016-01-01 00:00:00")), "TIMESTAMP('2016-01-01 00:00:00.0')")
     // TODO tests for decimals