[SPARK-6765] Fix test code style for SQL

So we can turn the style checker on for test code.
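
The diff below is mechanical; the switch the message refers to lives in the build. A rough sketch of what enabling the checker for test sources can look like with the scalastyle-sbt-plugin (key and task names follow that plugin; the exact wiring in Spark's build is an assumption, not shown on this page):

// build.sbt (sketch, assuming scalastyle-sbt-plugin is installed):
// point the Test scope at the same rule file the main sources use
scalastyleConfig in Test := (scalastyleConfig in Compile).value
// then check both source sets:
//   sbt scalastyle test:scalastyle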

Author: Reynold Xin <rxin@databricks.com>

Closes #5412 from rxin/test-style-sql and squashes the following commits:

9098a31 [Reynold Xin] One more compilation error ...
8c7250a [Reynold Xin] Fix compilation.
82d0944 [Reynold Xin] Indentation.
0b03fbb [Reynold Xin] code review.
f2f4348 [Reynold Xin] oops.
ef4ec48 [Reynold Xin] Hive module.
7e0db5e [Reynold Xin] sql module
04ec7ac [Reynold Xin] catalyst module
Committed by Reynold Xin on 2015-04-08 20:35:29 -07:00
parent 891ada5be1
commit 1b2aab8d5b
45 changed files with 395 additions and 234 deletions


@@ -30,7 +30,7 @@ class DistributionSuite extends FunSuite {
inputPartitioning: Partitioning,
requiredDistribution: Distribution,
satisfied: Boolean) {
if (inputPartitioning.satisfies(requiredDistribution) != satisfied)
if (inputPartitioning.satisfies(requiredDistribution) != satisfied) {
fail(
s"""
|== Input Partitioning ==
@@ -41,6 +41,7 @@ class DistributionSuite extends FunSuite {
|Expected $satisfied got ${inputPartitioning.satisfies(requiredDistribution)}
""".stripMargin)
}
}
test("HashPartitioning is the output partitioning") {
// Cases which do not need an exchange between two data properties.


@@ -27,6 +27,8 @@ import org.apache.spark.sql.types._
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import scala.collection.immutable
class AnalysisSuite extends FunSuite with BeforeAndAfter {
val caseSensitiveCatalog = new SimpleCatalog(true)
val caseInsensitiveCatalog = new SimpleCatalog(false)
@@ -41,10 +43,10 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
}
def caseSensitiveAnalyze(plan: LogicalPlan) =
def caseSensitiveAnalyze(plan: LogicalPlan): Unit =
caseSensitiveAnalyzer.checkAnalysis(caseSensitiveAnalyzer(plan))
def caseInsensitiveAnalyze(plan: LogicalPlan) =
def caseInsensitiveAnalyze(plan: LogicalPlan): Unit =
caseInsensitiveAnalyzer.checkAnalysis(caseInsensitiveAnalyzer(plan))
val testRelation = LocalRelation(AttributeReference("a", IntegerType, nullable = true)())
@@ -147,7 +149,7 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
name: String,
plan: LogicalPlan,
errorMessages: Seq[String],
caseSensitive: Boolean = true) = {
caseSensitive: Boolean = true): Unit = {
test(name) {
val error = intercept[AnalysisException] {
if(caseSensitive) {
@@ -202,7 +204,7 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
case class UnresolvedTestPlan() extends LeafNode {
override lazy val resolved = false
override def output = Nil
override def output: Seq[Attribute] = Nil
}
errorTest(


@@ -96,7 +96,9 @@ class HiveTypeCoercionSuite extends PlanTest {
widenTest(StringType, TimestampType, None)
// ComplexType
widenTest(NullType, MapType(IntegerType, StringType, false), Some(MapType(IntegerType, StringType, false)))
widenTest(NullType,
MapType(IntegerType, StringType, false),
Some(MapType(IntegerType, StringType, false)))
widenTest(NullType, StructType(Seq()), Some(StructType(Seq())))
widenTest(StringType, MapType(IntegerType, StringType, true), None)
widenTest(ArrayType(IntegerType), StructType(Seq()), None)
@@ -113,7 +115,9 @@ class HiveTypeCoercionSuite extends PlanTest {
// Remove superfluous boolean -> boolean casts.
ruleTest(Cast(Literal(true), BooleanType), Literal(true))
// Stringify boolean when casting to string.
ruleTest(Cast(Literal(false), StringType), If(Literal(false), Literal("true"), Literal("false")))
ruleTest(
Cast(Literal(false), StringType),
If(Literal(false), Literal("true"), Literal("false")))
}
test("coalesce casts") {


@@ -82,10 +82,13 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
assert(BitwiseNot(1.toByte).eval(EmptyRow).isInstanceOf[Byte])
}
// scalastyle:off
/**
* Checks for three-valued-logic. Based on:
* http://en.wikipedia.org/wiki/Null_(SQL)#Comparisons_with_NULL_and_the_three-valued_logic_.283VL.29
* I.e. in flat cpo "False -> Unknown -> True", OR is lowest upper bound, AND is greatest lower bound.
* I.e. in flat cpo "False -> Unknown -> True",
* OR is lowest upper bound,
* AND is greatest lower bound.
* p q p OR q p AND q p = q
* True True True True True
* True False True False False
@@ -102,7 +105,7 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
* False True
* Unknown Unknown
*/
// scalastyle:on
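
The table in the comment above is standard Kleene logic. A self-contained sketch (hypothetical helper names, with None standing in for Unknown) that reproduces the lub/glb reading:

def or3(p: Option[Boolean], q: Option[Boolean]): Option[Boolean] = (p, q) match {
  case (Some(true), _) | (_, Some(true)) => Some(true)   // lub: anything OR True is True
  case (Some(false), Some(false)) => Some(false)
  case _ => None                                         // otherwise Unknown
}
def and3(p: Option[Boolean], q: Option[Boolean]): Option[Boolean] = (p, q) match {
  case (Some(false), _) | (_, Some(false)) => Some(false) // glb: anything AND False is False
  case (Some(true), Some(true)) => Some(true)
  case _ => None
}
assert(or3(None, Some(true)) == Some(true))  // Unknown OR True  = True
assert(and3(None, Some(true)) == None)       // Unknown AND True = Unknown
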
val notTrueTable =
(true, false) ::
(false, true) ::
@@ -165,7 +168,9 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation(In(Literal(1), Seq(Literal(1), Literal(2))), true)
checkEvaluation(In(Literal(2), Seq(Literal(1), Literal(2))), true)
checkEvaluation(In(Literal(3), Seq(Literal(1), Literal(2))), false)
checkEvaluation(In(Literal(1), Seq(Literal(1), Literal(2))) && In(Literal(2), Seq(Literal(1), Literal(2))), true)
checkEvaluation(
In(Literal(1), Seq(Literal(1), Literal(2))) && In(Literal(2), Seq(Literal(1), Literal(2))),
true)
}
test("Divide") {
@@ -180,7 +185,8 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation(Divide(Literal.create(null, IntegerType), Literal(0)), null)
checkEvaluation(Divide(Literal.create(null, DoubleType), Literal(0.0)), null)
checkEvaluation(Divide(Literal.create(null, IntegerType), Literal(1)), null)
checkEvaluation(Divide(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null)
checkEvaluation(Divide(Literal.create(null, IntegerType), Literal.create(null, IntegerType)),
null)
}
test("Remainder") {
@@ -195,7 +201,8 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation(Remainder(Literal.create(null, IntegerType), Literal(0)), null)
checkEvaluation(Remainder(Literal.create(null, DoubleType), Literal(0.0)), null)
checkEvaluation(Remainder(Literal.create(null, IntegerType), Literal(1)), null)
checkEvaluation(Remainder(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null)
checkEvaluation(Remainder(Literal.create(null, IntegerType), Literal.create(null, IntegerType)),
null)
}
test("INSET") {
@@ -264,7 +271,8 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation("ab" like regEx, true, new GenericRow(Array[Any]("a%b")))
checkEvaluation("a\nb" like regEx, true, new GenericRow(Array[Any]("a%b")))
checkEvaluation(Literal.create(null, StringType) like regEx, null, new GenericRow(Array[Any]("bc%")))
checkEvaluation(Literal.create(null, StringType) like regEx, null,
new GenericRow(Array[Any]("bc%")))
}
test("RLIKE literal Regular Expression") {
@@ -507,8 +515,10 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
}
test("array casting") {
val array = Literal.create(Seq("123", "abc", "", null), ArrayType(StringType, containsNull = true))
val array_notNull = Literal.create(Seq("123", "abc", ""), ArrayType(StringType, containsNull = false))
val array = Literal.create(Seq("123", "abc", "", null),
ArrayType(StringType, containsNull = true))
val array_notNull = Literal.create(Seq("123", "abc", ""),
ArrayType(StringType, containsNull = false))
{
val cast = Cast(array, ArrayType(IntegerType, containsNull = true))
@@ -765,7 +775,8 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation(Coalesce(Literal.create(null, StringType) :: Nil), null, row)
checkEvaluation(Coalesce(Literal.create(null, StringType) :: c1 :: c2 :: Nil), "^Ba*n", row)
checkEvaluation(If(c3, Literal.create("a", StringType), Literal.create("b", StringType)), "a", row)
checkEvaluation(
If(c3, Literal.create("a", StringType), Literal.create("b", StringType)), "a", row)
checkEvaluation(If(c3, c1, c2), "^Ba*n", row)
checkEvaluation(If(c4, c2, c1), "^Ba*n", row)
checkEvaluation(If(Literal.create(null, BooleanType), c2, c1), "^Ba*n", row)
@@ -842,18 +853,20 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation(GetItem(BoundReference(3, typeMap, true),
Literal("aa")), "bb", row)
checkEvaluation(GetItem(Literal.create(null, typeMap), Literal("aa")), null, row)
checkEvaluation(GetItem(Literal.create(null, typeMap), Literal.create(null, StringType)), null, row)
checkEvaluation(
GetItem(Literal.create(null, typeMap), Literal.create(null, StringType)), null, row)
checkEvaluation(GetItem(BoundReference(3, typeMap, true),
Literal.create(null, StringType)), null, row)
checkEvaluation(GetItem(BoundReference(4, typeArray, true),
Literal(1)), "bb", row)
checkEvaluation(GetItem(Literal.create(null, typeArray), Literal(1)), null, row)
checkEvaluation(GetItem(Literal.create(null, typeArray), Literal.create(null, IntegerType)), null, row)
checkEvaluation(
GetItem(Literal.create(null, typeArray), Literal.create(null, IntegerType)), null, row)
checkEvaluation(GetItem(BoundReference(4, typeArray, true),
Literal.create(null, IntegerType)), null, row)
def quickBuildGetField(expr: Expression, fieldName: String) = {
def quickBuildGetField(expr: Expression, fieldName: String): StructGetField = {
expr.dataType match {
case StructType(fields) =>
val field = fields.find(_.name == fieldName).get
@@ -861,7 +874,9 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
}
}
def quickResolve(u: UnresolvedGetField) = quickBuildGetField(u.child, u.fieldName)
def quickResolve(u: UnresolvedGetField): StructGetField = {
quickBuildGetField(u.child, u.fieldName)
}
checkEvaluation(quickBuildGetField(BoundReference(2, typeS, nullable = true), "a"), "aa", row)
checkEvaluation(quickBuildGetField(Literal.create(null, typeS), "a"), null, row)
@@ -872,7 +887,8 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
)
assert(quickBuildGetField(BoundReference(2,typeS, nullable = true), "a").nullable === true)
assert(quickBuildGetField(BoundReference(2, typeS_notNullable, nullable = false), "a").nullable === false)
assert(quickBuildGetField(BoundReference(2, typeS_notNullable, nullable = false), "a").nullable
=== false)
assert(quickBuildGetField(Literal.create(null, typeS), "a").nullable === true)
assert(quickBuildGetField(Literal.create(null, typeS_notNullable), "a").nullable === true)
@@ -896,7 +912,8 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation(Add(c1, c2), 3, row)
checkEvaluation(Add(c1, Literal.create(null, IntegerType)), null, row)
checkEvaluation(Add(Literal.create(null, IntegerType), c2), null, row)
checkEvaluation(Add(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
checkEvaluation(
Add(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
checkEvaluation(-c1, -1, row)
checkEvaluation(c1 + c2, 3, row)
@@ -919,7 +936,8 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation(Add(c1, c2), 3.1, row)
checkEvaluation(Add(c1, Literal.create(null, DoubleType)), null, row)
checkEvaluation(Add(Literal.create(null, DoubleType), c2), null, row)
checkEvaluation(Add(Literal.create(null, DoubleType), Literal.create(null, DoubleType)), null, row)
checkEvaluation(
Add(Literal.create(null, DoubleType), Literal.create(null, DoubleType)), null, row)
checkEvaluation(-c1, -1.1, row)
checkEvaluation(c1 + c2, 3.1, row)
@@ -942,7 +960,8 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation(LessThan(c1, c2), true, row)
checkEvaluation(LessThan(c1, Literal.create(null, IntegerType)), null, row)
checkEvaluation(LessThan(Literal.create(null, IntegerType), c2), null, row)
checkEvaluation(LessThan(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
checkEvaluation(
LessThan(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
checkEvaluation(c1 < c2, true, row)
checkEvaluation(c1 <= c2, true, row)
@@ -985,54 +1004,84 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
val s = 'a.string.at(0)
// substring from zero position with less-than-full length
checkEvaluation(Substring(s, Literal.create(0, IntegerType), Literal.create(2, IntegerType)), "ex", row)
checkEvaluation(Substring(s, Literal.create(1, IntegerType), Literal.create(2, IntegerType)), "ex", row)
checkEvaluation(
Substring(s, Literal.create(0, IntegerType), Literal.create(2, IntegerType)), "ex", row)
checkEvaluation(
Substring(s, Literal.create(1, IntegerType), Literal.create(2, IntegerType)), "ex", row)
// substring from zero position with full length
checkEvaluation(Substring(s, Literal.create(0, IntegerType), Literal.create(7, IntegerType)), "example", row)
checkEvaluation(Substring(s, Literal.create(1, IntegerType), Literal.create(7, IntegerType)), "example", row)
checkEvaluation(
Substring(s, Literal.create(0, IntegerType), Literal.create(7, IntegerType)), "example", row)
checkEvaluation(
Substring(s, Literal.create(1, IntegerType), Literal.create(7, IntegerType)), "example", row)
// substring from zero position with greater-than-full length
checkEvaluation(Substring(s, Literal.create(0, IntegerType), Literal.create(100, IntegerType)), "example", row)
checkEvaluation(Substring(s, Literal.create(1, IntegerType), Literal.create(100, IntegerType)), "example", row)
checkEvaluation(Substring(s, Literal.create(0, IntegerType), Literal.create(100, IntegerType)),
"example", row)
checkEvaluation(Substring(s, Literal.create(1, IntegerType), Literal.create(100, IntegerType)),
"example", row)
// substring from nonzero position with less-than-full length
checkEvaluation(Substring(s, Literal.create(2, IntegerType), Literal.create(2, IntegerType)), "xa", row)
checkEvaluation(Substring(s, Literal.create(2, IntegerType), Literal.create(2, IntegerType)),
"xa", row)
// substring from nonzero position with full length
checkEvaluation(Substring(s, Literal.create(2, IntegerType), Literal.create(6, IntegerType)), "xample", row)
checkEvaluation(Substring(s, Literal.create(2, IntegerType), Literal.create(6, IntegerType)),
"xample", row)
// substring from nonzero position with greater-than-full length
checkEvaluation(Substring(s, Literal.create(2, IntegerType), Literal.create(100, IntegerType)), "xample", row)
checkEvaluation(Substring(s, Literal.create(2, IntegerType), Literal.create(100, IntegerType)),
"xample", row)
// zero-length substring (within string bounds)
checkEvaluation(Substring(s, Literal.create(0, IntegerType), Literal.create(0, IntegerType)), "", row)
checkEvaluation(Substring(s, Literal.create(0, IntegerType), Literal.create(0, IntegerType)),
"", row)
// zero-length substring (beyond string bounds)
checkEvaluation(Substring(s, Literal.create(100, IntegerType), Literal.create(4, IntegerType)), "", row)
checkEvaluation(Substring(s, Literal.create(100, IntegerType), Literal.create(4, IntegerType)),
"", row)
// substring(null, _, _) -> null
checkEvaluation(Substring(s, Literal.create(100, IntegerType), Literal.create(4, IntegerType)), null, new GenericRow(Array[Any](null)))
checkEvaluation(Substring(s, Literal.create(100, IntegerType), Literal.create(4, IntegerType)),
null, new GenericRow(Array[Any](null)))
// substring(_, null, _) -> null
checkEvaluation(Substring(s, Literal.create(null, IntegerType), Literal.create(4, IntegerType)), null, row)
checkEvaluation(Substring(s, Literal.create(null, IntegerType), Literal.create(4, IntegerType)),
null, row)
// substring(_, _, null) -> null
checkEvaluation(Substring(s, Literal.create(100, IntegerType), Literal.create(null, IntegerType)), null, row)
checkEvaluation(
Substring(s, Literal.create(100, IntegerType), Literal.create(null, IntegerType)),
null,
row)
// 2-arg substring from zero position
checkEvaluation(Substring(s, Literal.create(0, IntegerType), Literal.create(Integer.MAX_VALUE, IntegerType)), "example", row)
checkEvaluation(Substring(s, Literal.create(1, IntegerType), Literal.create(Integer.MAX_VALUE, IntegerType)), "example", row)
checkEvaluation(
Substring(s, Literal.create(0, IntegerType), Literal.create(Integer.MAX_VALUE, IntegerType)),
"example",
row)
checkEvaluation(
Substring(s, Literal.create(1, IntegerType), Literal.create(Integer.MAX_VALUE, IntegerType)),
"example",
row)
// 2-arg substring from nonzero position
checkEvaluation(Substring(s, Literal.create(2, IntegerType), Literal.create(Integer.MAX_VALUE, IntegerType)), "xample", row)
checkEvaluation(
Substring(s, Literal.create(2, IntegerType), Literal.create(Integer.MAX_VALUE, IntegerType)),
"xample",
row)
val s_notNull = 'a.string.notNull.at(0)
assert(Substring(s, Literal.create(0, IntegerType), Literal.create(2, IntegerType)).nullable === true)
assert(Substring(s_notNull, Literal.create(0, IntegerType), Literal.create(2, IntegerType)).nullable === false)
assert(Substring(s_notNull, Literal.create(null, IntegerType), Literal.create(2, IntegerType)).nullable === true)
assert(Substring(s_notNull, Literal.create(0, IntegerType), Literal.create(null, IntegerType)).nullable === true)
assert(Substring(s, Literal.create(0, IntegerType), Literal.create(2, IntegerType)).nullable
=== true)
assert(
Substring(s_notNull, Literal.create(0, IntegerType), Literal.create(2, IntegerType)).nullable
=== false)
assert(Substring(s_notNull,
Literal.create(null, IntegerType), Literal.create(2, IntegerType)).nullable === true)
assert(Substring(s_notNull,
Literal.create(0, IntegerType), Literal.create(null, IntegerType)).nullable === true)
checkEvaluation(s.substr(0, 2), "ex", row)
checkEvaluation(s.substr(0), "example", row)
@@ -1065,17 +1114,20 @@ class ExpressionEvaluationSuite extends ExpressionEvaluationBaseSuite {
checkEvaluation(BitwiseAnd(c1, c4), null, row)
checkEvaluation(BitwiseAnd(c1, c2), 0, row)
checkEvaluation(BitwiseAnd(c1, Literal.create(null, IntegerType)), null, row)
checkEvaluation(BitwiseAnd(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
checkEvaluation(
BitwiseAnd(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
checkEvaluation(BitwiseOr(c1, c4), null, row)
checkEvaluation(BitwiseOr(c1, c2), 3, row)
checkEvaluation(BitwiseOr(c1, Literal.create(null, IntegerType)), null, row)
checkEvaluation(BitwiseOr(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
checkEvaluation(
BitwiseOr(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
checkEvaluation(BitwiseXor(c1, c4), null, row)
checkEvaluation(BitwiseXor(c1, c2), 3, row)
checkEvaluation(BitwiseXor(c1, Literal.create(null, IntegerType)), null, row)
checkEvaluation(BitwiseXor(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
checkEvaluation(
BitwiseXor(Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null, row)
checkEvaluation(BitwiseNot(c4), null, row)
checkEvaluation(BitwiseNot(c1), -2, row)


@@ -176,14 +176,13 @@ class ConstantFoldingSuite extends PlanTest {
}
test("Constant folding test: expressions have null literals") {
val originalQuery =
testRelation
.select(
val originalQuery = testRelation.select(
IsNull(Literal(null)) as 'c1,
IsNotNull(Literal(null)) as 'c2,
GetItem(Literal.create(null, ArrayType(IntegerType)), 1) as 'c3,
GetItem(Literal.create(Seq(1), ArrayType(IntegerType)), Literal.create(null, IntegerType)) as 'c4,
GetItem(
Literal.create(Seq(1), ArrayType(IntegerType)), Literal.create(null, IntegerType)) as 'c4,
UnresolvedGetField(
Literal.create(null, StructType(Seq(StructField("a", IntegerType, true)))),
"a") as 'c5,


@@ -432,7 +432,8 @@ class FilterPushdownSuite extends PlanTest {
val originalQuery = {
z.join(x.join(y))
.where(("x.b".attr === "y.b".attr) && ("x.a".attr === 1) && ("z.a".attr >= 3) && ("z.a".attr === "x.b".attr))
.where(("x.b".attr === "y.b".attr) && ("x.a".attr === 1) &&
("z.a".attr >= 3) && ("z.a".attr === "x.b".attr))
}
val optimized = Optimize(originalQuery.analyze)


@@ -52,7 +52,7 @@ class OptimizeInSuite extends PlanTest {
val optimized = Optimize(originalQuery.analyze)
val correctAnswer =
testRelation
.where(InSet(UnresolvedAttribute("a"), HashSet[Any]()+1+2))
.where(InSet(UnresolvedAttribute("a"), HashSet[Any]() + 1 + 2))
.analyze
comparePlans(optimized, correctAnswer)


@@ -45,13 +45,14 @@ class PlanTest extends FunSuite {
protected def comparePlans(plan1: LogicalPlan, plan2: LogicalPlan) {
val normalized1 = normalizeExprIds(plan1)
val normalized2 = normalizeExprIds(plan2)
if (normalized1 != normalized2)
if (normalized1 != normalized2) {
fail(
s"""
|== FAIL: Plans do not match ===
|${sideBySide(normalized1.treeString, normalized2.treeString).mkString("\n")}
""".stripMargin)
}
}
/** Fails the test if the two expressions do not match */
protected def compareExpressions(e1: Expression, e2: Expression): Unit = {


@@ -32,7 +32,7 @@ class SameResultSuite extends FunSuite {
val testRelation = LocalRelation('a.int, 'b.int, 'c.int)
val testRelation2 = LocalRelation('a.int, 'b.int, 'c.int)
def assertSameResult(a: LogicalPlan, b: LogicalPlan, result: Boolean = true) = {
def assertSameResult(a: LogicalPlan, b: LogicalPlan, result: Boolean = true): Unit = {
val aAnalyzed = a.analyze
val bAnalyzed = b.analyze


@@ -25,12 +25,12 @@ import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types.{StringType, NullType}
case class Dummy(optKey: Option[Expression]) extends Expression {
def children = optKey.toSeq
def nullable = true
def dataType = NullType
def children: Seq[Expression] = optKey.toSeq
def nullable: Boolean = true
def dataType: NullType = NullType
override lazy val resolved = true
type EvaluatedType = Any
def eval(input: Row) = null.asInstanceOf[Any]
def eval(input: Row): Any = null.asInstanceOf[Any]
}
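
The Dummy rewrite above is typical of most hunks in this commit: only explicit result types are added. A generic illustration of what the rule buys (not Spark code):

object ReturnTypeSketch {
  // with inference, the public signature silently tracks the implementation
  def ids = Seq(1, 2, 3).toSet
  // with an annotation, the contract is pinned down and readable at the declaration
  def idsTyped: Set[Int] = Seq(1, 2, 3).toSet
}
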
class TreeNodeSuite extends FunSuite {


@@ -92,7 +92,8 @@ class CachedTableSuite extends QueryTest {
test("too big for memory") {
val data = "*" * 10000
sparkContext.parallelize(1 to 200000, 1).map(_ => BigData(data)).toDF().registerTempTable("bigData")
sparkContext.parallelize(1 to 200000, 1).map(_ => BigData(data)).toDF()
.registerTempTable("bigData")
table("bigData").persist(StorageLevel.MEMORY_AND_DISK)
assert(table("bigData").count() === 200000L)
table("bigData").unpersist(blocking = true)


@@ -329,8 +329,9 @@ class DataFrameSuite extends QueryTest {
checkAnswer(
decimalData.agg(avg('a cast DecimalType(10, 2))),
Row(new java.math.BigDecimal(2.0)))
// non-partial
checkAnswer(
decimalData.agg(avg('a cast DecimalType(10, 2)), sumDistinct('a cast DecimalType(10, 2))), // non-partial
decimalData.agg(avg('a cast DecimalType(10, 2)), sumDistinct('a cast DecimalType(10, 2))),
Row(new java.math.BigDecimal(2.0), new java.math.BigDecimal(6)) :: Nil)
}


@@ -67,7 +67,7 @@ class QueryTest extends PlanTest {
checkAnswer(df, Seq(expectedAnswer))
}
def sqlTest(sqlString: String, expectedAnswer: Seq[Row])(implicit sqlContext: SQLContext): Unit = {
def sqlTest(sqlString: String, expectedAnswer: Seq[Row])(implicit sqlContext: SQLContext) {
test(sqlString) {
checkAnswer(sqlContext.sql(sqlString), expectedAnswer)
}


@@ -268,7 +268,10 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.002")))
checkAnswer(sql(
"SELECT time FROM timestamps WHERE time IN ('1969-12-31 16:00:00.001','1969-12-31 16:00:00.002')"),
"""
|SELECT time FROM timestamps
|WHERE time IN ('1969-12-31 16:00:00.001','1969-12-31 16:00:00.002')
""".stripMargin),
Seq(Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.001")),
Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.002"))))
@@ -334,7 +337,7 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
Row("1"))
}
def sortTest() = {
def sortTest(): Unit = {
checkAnswer(
sql("SELECT * FROM testData2 ORDER BY a ASC, b ASC"),
Seq(Row(1,1), Row(1,2), Row(2,1), Row(2,2), Row(3,1), Row(3,2)))
@@ -413,7 +416,10 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
test("from follow multiple brackets") {
checkAnswer(sql(
"select key from ((select * from testData limit 1) union all (select * from testData limit 1)) x limit 1"),
"""
|select key from ((select * from testData limit 1)
| union all (select * from testData limit 1)) x limit 1
""".stripMargin),
Row(1)
)
@@ -423,7 +429,11 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
)
checkAnswer(sql(
"select key from (select * from testData limit 1 union all select * from testData limit 1) x limit 1"),
"""
|select key from
| (select * from testData limit 1 union all select * from testData limit 1) x
| limit 1
""".stripMargin),
Row(1)
)
}
@@ -470,7 +480,10 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
Seq(Row(1, 0), Row(2, 1)))
checkAnswer(
sql("SELECT COUNT(a), COUNT(b), COUNT(1), COUNT(DISTINCT a), COUNT(DISTINCT b) FROM testData3"),
sql(
"""
|SELECT COUNT(a), COUNT(b), COUNT(1), COUNT(DISTINCT a), COUNT(DISTINCT b) FROM testData3
""".stripMargin),
Row(2, 1, 2, 2, 1))
}
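
The long SQL strings in this file are rewrapped with stripMargin, which drops everything up to and including the leading '|' of each line, so queries can be indented freely:

val query = """
  |SELECT COUNT(a), COUNT(b)
  |FROM testData3
""".stripMargin
// query is "\nSELECT COUNT(a), COUNT(b)\nFROM testData3\n"; the SQL parser
// is indifferent to the surrounding newlines.
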
@@ -1083,7 +1096,8 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
}
test("SPARK-3483 Special chars in column names") {
val data = sparkContext.parallelize(Seq("""{"key?number1": "value1", "key.number2": "value2"}"""))
val data = sparkContext.parallelize(
Seq("""{"key?number1": "value1", "key.number2": "value2"}"""))
jsonRDD(data).registerTempTable("records")
sql("SELECT `key?number1` FROM records")
}
@@ -1168,8 +1182,8 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
}
test("SPARK-6145: ORDER BY test for nested fields") {
jsonRDD(sparkContext.makeRDD(
"""{"a": {"b": 1, "a": {"a": 1}}, "c": [{"d": 1}]}""" :: Nil)).registerTempTable("nestedOrder")
jsonRDD(sparkContext.makeRDD("""{"a": {"b": 1, "a": {"a": 1}}, "c": [{"d": 1}]}""" :: Nil))
.registerTempTable("nestedOrder")
checkAnswer(sql("SELECT 1 FROM nestedOrder ORDER BY a.b"), Row(1))
checkAnswer(sql("SELECT a.b FROM nestedOrder ORDER BY a.b"), Row(1))


@@ -103,7 +103,8 @@ class ScalaReflectionRelationSuite extends FunSuite {
val rdd = sparkContext.parallelize(data :: Nil)
rdd.toDF().registerTempTable("reflectOptionalData")
assert(sql("SELECT * FROM reflectOptionalData").collect().head === Row.fromSeq(Seq.fill(7)(null)))
assert(sql("SELECT * FROM reflectOptionalData").collect().head ===
Row.fromSeq(Seq.fill(7)(null)))
}
// Equality is broken for Arrays, so we test that separately.


@@ -63,7 +63,7 @@ private[sql] class MyDenseVectorUDT extends UserDefinedType[MyDenseVector] {
}
}
override def userClass = classOf[MyDenseVector]
override def userClass: Class[MyDenseVector] = classOf[MyDenseVector]
private[spark] override def asNullable: MyDenseVectorUDT = this
}


@@ -27,7 +27,7 @@ import org.apache.spark.sql.catalyst.expressions.GenericMutableRow
import org.apache.spark.sql.types.{Decimal, DataType, NativeType}
object ColumnarTestUtils {
def makeNullRow(length: Int) = {
def makeNullRow(length: Int): GenericMutableRow = {
val row = new GenericMutableRow(length)
(0 until length).foreach(row.setNullAt)
row
@@ -93,7 +93,7 @@ object ColumnarTestUtils {
def makeUniqueValuesAndSingleValueRows[T <: NativeType](
columnType: NativeColumnType[T],
count: Int) = {
count: Int): (Seq[T#JvmType], Seq[GenericMutableRow]) = {
val values = makeUniqueRandomValues(columnType, count)
val rows = values.map { value =>


@@ -31,7 +31,8 @@ class TestNullableColumnAccessor[T <: DataType, JvmType](
with NullableColumnAccessor
object TestNullableColumnAccessor {
def apply[T <: DataType, JvmType](buffer: ByteBuffer, columnType: ColumnType[T, JvmType]) = {
def apply[T <: DataType, JvmType](buffer: ByteBuffer, columnType: ColumnType[T, JvmType])
: TestNullableColumnAccessor[T, JvmType] = {
// Skips the column type ID
buffer.getInt()
new TestNullableColumnAccessor(buffer, columnType)


@@ -27,7 +27,8 @@ class TestNullableColumnBuilder[T <: DataType, JvmType](columnType: ColumnType[T
with NullableColumnBuilder
object TestNullableColumnBuilder {
def apply[T <: DataType, JvmType](columnType: ColumnType[T, JvmType], initialSize: Int = 0) = {
def apply[T <: DataType, JvmType](columnType: ColumnType[T, JvmType], initialSize: Int = 0)
: TestNullableColumnBuilder[T, JvmType] = {
val builder = new TestNullableColumnBuilder(columnType)
builder.initialize(initialSize)
builder


@@ -35,7 +35,7 @@ object TestCompressibleColumnBuilder {
def apply[T <: NativeType](
columnStats: ColumnStats,
columnType: NativeColumnType[T],
scheme: CompressionScheme) = {
scheme: CompressionScheme): TestCompressibleColumnBuilder[T] = {
val builder = new TestCompressibleColumnBuilder(columnStats, columnType, Seq(scheme))
builder.initialize(0, "", useCompression = true)


@@ -45,10 +45,12 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
conn = DriverManager.getConnection(url, properties)
conn.prepareStatement("create schema test").executeUpdate()
conn.prepareStatement("create table test.people (name TEXT(32) NOT NULL, theid INTEGER NOT NULL)").executeUpdate()
conn.prepareStatement(
"create table test.people (name TEXT(32) NOT NULL, theid INTEGER NOT NULL)").executeUpdate()
conn.prepareStatement("insert into test.people values ('fred', 1)").executeUpdate()
conn.prepareStatement("insert into test.people values ('mary', 2)").executeUpdate()
conn.prepareStatement("insert into test.people values ('joe ''foo'' \"bar\"', 3)").executeUpdate()
conn.prepareStatement(
"insert into test.people values ('joe ''foo'' \"bar\"', 3)").executeUpdate()
conn.commit()
sql(
@@ -132,25 +134,25 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
}
test("SELECT *") {
assert(sql("SELECT * FROM foobar").collect().size == 3)
assert(sql("SELECT * FROM foobar").collect().size === 3)
}
test("SELECT * WHERE (simple predicates)") {
assert(sql("SELECT * FROM foobar WHERE THEID < 1").collect().size == 0)
assert(sql("SELECT * FROM foobar WHERE THEID != 2").collect().size == 2)
assert(sql("SELECT * FROM foobar WHERE THEID = 1").collect().size == 1)
assert(sql("SELECT * FROM foobar WHERE NAME = 'fred'").collect().size == 1)
assert(sql("SELECT * FROM foobar WHERE NAME > 'fred'").collect().size == 2)
assert(sql("SELECT * FROM foobar WHERE NAME != 'fred'").collect().size == 2)
assert(sql("SELECT * FROM foobar WHERE THEID < 1").collect().size === 0)
assert(sql("SELECT * FROM foobar WHERE THEID != 2").collect().size === 2)
assert(sql("SELECT * FROM foobar WHERE THEID = 1").collect().size === 1)
assert(sql("SELECT * FROM foobar WHERE NAME = 'fred'").collect().size === 1)
assert(sql("SELECT * FROM foobar WHERE NAME > 'fred'").collect().size === 2)
assert(sql("SELECT * FROM foobar WHERE NAME != 'fred'").collect().size === 2)
}
test("SELECT * WHERE (quoted strings)") {
assert(sql("select * from foobar").where('NAME === "joe 'foo' \"bar\"").collect().size == 1)
assert(sql("select * from foobar").where('NAME === "joe 'foo' \"bar\"").collect().size === 1)
}
test("SELECT first field") {
val names = sql("SELECT NAME FROM foobar").collect().map(x => x.getString(0)).sortWith(_ < _)
assert(names.size == 3)
assert(names.size === 3)
assert(names(0).equals("fred"))
assert(names(1).equals("joe 'foo' \"bar\""))
assert(names(2).equals("mary"))
@@ -158,10 +160,10 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
test("SELECT second field") {
val ids = sql("SELECT THEID FROM foobar").collect().map(x => x.getInt(0)).sortWith(_ < _)
assert(ids.size == 3)
assert(ids(0) == 1)
assert(ids(1) == 2)
assert(ids(2) == 3)
assert(ids.size === 3)
assert(ids(0) === 1)
assert(ids(1) === 2)
assert(ids(2) === 3)
}
test("SELECT * partitioned") {
@@ -169,46 +171,46 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
}
test("SELECT WHERE (simple predicates) partitioned") {
assert(sql("SELECT * FROM parts WHERE THEID < 1").collect().size == 0)
assert(sql("SELECT * FROM parts WHERE THEID != 2").collect().size == 2)
assert(sql("SELECT THEID FROM parts WHERE THEID = 1").collect().size == 1)
assert(sql("SELECT * FROM parts WHERE THEID < 1").collect().size === 0)
assert(sql("SELECT * FROM parts WHERE THEID != 2").collect().size === 2)
assert(sql("SELECT THEID FROM parts WHERE THEID = 1").collect().size === 1)
}
test("SELECT second field partitioned") {
val ids = sql("SELECT THEID FROM parts").collect().map(x => x.getInt(0)).sortWith(_ < _)
assert(ids.size == 3)
assert(ids(0) == 1)
assert(ids(1) == 2)
assert(ids(2) == 3)
assert(ids.size === 3)
assert(ids(0) === 1)
assert(ids(1) === 2)
assert(ids(2) === 3)
}
test("Basic API") {
assert(TestSQLContext.jdbc(urlWithUserAndPass, "TEST.PEOPLE").collect.size == 3)
assert(TestSQLContext.jdbc(urlWithUserAndPass, "TEST.PEOPLE").collect().size === 3)
}
test("Partitioning via JDBCPartitioningInfo API") {
assert(TestSQLContext.jdbc(urlWithUserAndPass, "TEST.PEOPLE", "THEID", 0, 4, 3)
.collect.size == 3)
.collect.size === 3)
}
test("Partitioning via list-of-where-clauses API") {
val parts = Array[String]("THEID < 2", "THEID >= 2")
assert(TestSQLContext.jdbc(urlWithUserAndPass, "TEST.PEOPLE", parts).collect.size == 3)
assert(TestSQLContext.jdbc(urlWithUserAndPass, "TEST.PEOPLE", parts).collect().size === 3)
}
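
Note the collect-to-collect() changes alongside the === ones: by Scala convention a parameterless method that does work keeps its parentheses, while a pure accessor drops them. A standalone, REPL-style sketch:

class Counter {
  private var n = 0
  def value: Int = n             // pure accessor: no parentheses
  def increment(): Unit = n += 1 // side-effecting: declared and called with ()
}
val c = new Counter
c.increment()
assert(c.value == 1)
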
test("H2 integral types") {
val rows = sql("SELECT * FROM inttypes WHERE A IS NOT NULL").collect()
assert(rows.size == 1)
assert(rows(0).getInt(0) == 1)
assert(rows(0).getBoolean(1) == false)
assert(rows(0).getInt(2) == 3)
assert(rows(0).getInt(3) == 4)
assert(rows(0).getLong(4) == 1234567890123L)
assert(rows.size === 1)
assert(rows(0).getInt(0) === 1)
assert(rows(0).getBoolean(1) === false)
assert(rows(0).getInt(2) === 3)
assert(rows(0).getInt(3) === 4)
assert(rows(0).getLong(4) === 1234567890123L)
}
test("H2 null entries") {
val rows = sql("SELECT * FROM inttypes WHERE A IS NULL").collect()
assert(rows.size == 1)
assert(rows.size === 1)
assert(rows(0).isNullAt(0))
assert(rows(0).isNullAt(1))
assert(rows(0).isNullAt(2))
@@ -230,27 +232,27 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
val rows = sql("SELECT * FROM timetypes").collect()
val cal = new GregorianCalendar(java.util.Locale.ROOT)
cal.setTime(rows(0).getAs[java.sql.Timestamp](0))
assert(cal.get(Calendar.HOUR_OF_DAY) == 12)
assert(cal.get(Calendar.MINUTE) == 34)
assert(cal.get(Calendar.SECOND) == 56)
assert(cal.get(Calendar.HOUR_OF_DAY) === 12)
assert(cal.get(Calendar.MINUTE) === 34)
assert(cal.get(Calendar.SECOND) === 56)
cal.setTime(rows(0).getAs[java.sql.Timestamp](1))
assert(cal.get(Calendar.YEAR) == 1996)
assert(cal.get(Calendar.MONTH) == 0)
assert(cal.get(Calendar.DAY_OF_MONTH) == 1)
assert(cal.get(Calendar.YEAR) === 1996)
assert(cal.get(Calendar.MONTH) === 0)
assert(cal.get(Calendar.DAY_OF_MONTH) === 1)
cal.setTime(rows(0).getAs[java.sql.Timestamp](2))
assert(cal.get(Calendar.YEAR) == 2002)
assert(cal.get(Calendar.MONTH) == 1)
assert(cal.get(Calendar.DAY_OF_MONTH) == 20)
assert(cal.get(Calendar.HOUR) == 11)
assert(cal.get(Calendar.MINUTE) == 22)
assert(cal.get(Calendar.SECOND) == 33)
assert(rows(0).getAs[java.sql.Timestamp](2).getNanos == 543543543)
assert(cal.get(Calendar.YEAR) === 2002)
assert(cal.get(Calendar.MONTH) === 1)
assert(cal.get(Calendar.DAY_OF_MONTH) === 20)
assert(cal.get(Calendar.HOUR) === 11)
assert(cal.get(Calendar.MINUTE) === 22)
assert(cal.get(Calendar.SECOND) === 33)
assert(rows(0).getAs[java.sql.Timestamp](2).getNanos === 543543543)
}
test("H2 floating-point types") {
val rows = sql("SELECT * FROM flttypes").collect()
assert(rows(0).getDouble(0) == 1.00000000000000022) // Yes, I meant ==.
assert(rows(0).getDouble(1) == 1.00000011920928955) // Yes, I meant ==.
assert(rows(0).getDouble(0) === 1.00000000000000022) // Yes, I meant ==.
assert(rows(0).getDouble(1) === 1.00000011920928955) // Yes, I meant ==.
assert(rows(0).getAs[BigDecimal](2)
.equals(new BigDecimal("123456789012345.54321543215432100000")))
}
@@ -264,7 +266,7 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
| user 'testUser', password 'testPass')
""".stripMargin.replaceAll("\n", " "))
val rows = sql("SELECT * FROM hack").collect()
assert(rows(0).getDouble(0) == 1.00000011920928955) // Yes, I meant ==.
assert(rows(0).getDouble(0) === 1.00000011920928955) // Yes, I meant ==.
// For some reason, H2 computes this square incorrectly...
assert(math.abs(rows(0).getDouble(1) - 1.00000023841859331) < 1e-12)
}
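
The wholesale == to === switch in this suite is about diagnostics: ScalaTest's === reports both operands on failure (e.g. "2 did not equal 3"), whereas a plain boolean assert in older ScalaTest versions only reported that the condition was false. A minimal sketch:

import org.scalatest.FunSuite

class EqualitySketch extends FunSuite {
  test("=== carries operands into the failure message") {
    val names = Seq("fred", "mary", "joe")
    assert(names.size === 3) // on failure: "2 did not equal 3"
  }
}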


@@ -380,8 +380,10 @@ class JsonSuite extends QueryTest {
sql("select * from jsonTable"),
Row("true", 11L, null, 1.1, "13.1", "str1") ::
Row("12", null, new java.math.BigDecimal("21474836470.9"), null, null, "true") ::
Row("false", 21474836470L, new java.math.BigDecimal("92233720368547758070"), 100, "str1", "false") ::
Row(null, 21474836570L, new java.math.BigDecimal("1.1"), 21474836470L, "92233720368547758070", null) :: Nil
Row("false", 21474836470L,
new java.math.BigDecimal("92233720368547758070"), 100, "str1", "false") ::
Row(null, 21474836570L,
new java.math.BigDecimal("1.1"), 21474836470L, "92233720368547758070", null) :: Nil
)
// Number and Boolean conflict: resolve the type as number in this query.
@@ -404,7 +406,8 @@ class JsonSuite extends QueryTest {
// Widening to DecimalType
checkAnswer(
sql("select num_num_2 + 1.2 from jsonTable where num_num_2 > 1.1"),
Row(new java.math.BigDecimal("21474836472.1")) :: Row(new java.math.BigDecimal("92233720368547758071.2")) :: Nil
Row(new java.math.BigDecimal("21474836472.1")) ::
Row(new java.math.BigDecimal("92233720368547758071.2")) :: Nil
)
// Widening to DoubleType
@@ -913,8 +916,10 @@ class JsonSuite extends QueryTest {
df1.registerTempTable("applySchema1")
val df2 = df1.toDF
val result = df2.toJSON.collect()
// scalastyle:off
assert(result(0) === "{\"f1\":1,\"f2\":\"A1\",\"f3\":true,\"f4\":[\"1\",\" A1\",\" true\",\" null\"]}")
assert(result(3) === "{\"f1\":4,\"f2\":\"D4\",\"f3\":true,\"f4\":[\"4\",\" D4\",\" true\",\" 2147483644\"],\"f5\":2147483644}")
// scalastyle:on
val schema2 = StructType(
StructField("f1", StructType(
@@ -968,7 +973,8 @@ class JsonSuite extends QueryTest {
// Access elements of a BigInteger array (we use DecimalType internally).
checkAnswer(
sql("select arrayOfBigInteger[0], arrayOfBigInteger[1], arrayOfBigInteger[2] from complexTable"),
sql("select arrayOfBigInteger[0], arrayOfBigInteger[1], arrayOfBigInteger[2] " +
" from complexTable"),
Row(new java.math.BigDecimal("922337203685477580700"),
new java.math.BigDecimal("-922337203685477580800"), null)
)
@@ -1008,7 +1014,8 @@ class JsonSuite extends QueryTest {
// Access elements of an array field of a struct.
checkAnswer(
sql("select structWithArrayFields.field1[1], structWithArrayFields.field2[3] from complexTable"),
sql("select structWithArrayFields.field1[1], structWithArrayFields.field2[3] " +
"from complexTable"),
Row(5, null)
)
}


@@ -218,7 +218,7 @@ class ParquetIOSuiteBase extends QueryTest with ParquetTest {
}
test("compression codec") {
def compressionCodecFor(path: String) = {
def compressionCodecFor(path: String): String = {
val codecs = ParquetTypesConverter
.readMetaData(new Path(path), Some(configuration))
.getBlocks


@@ -180,10 +180,12 @@ class ParquetSchemaSuite extends FunSuite with ParquetTest {
val caseClassString =
"StructType(List(StructField(c1,IntegerType,false), StructField(c2,BinaryType,true)))"
// scalastyle:off
val jsonString =
"""
|{"type":"struct","fields":[{"name":"c1","type":"integer","nullable":false,"metadata":{}},{"name":"c2","type":"binary","nullable":true,"metadata":{}}]}
""".stripMargin
// scalastyle:on
val fromCaseClassString = ParquetTypesConverter.convertFromString(caseClassString)
val fromJson = ParquetTypesConverter.convertFromString(jsonString)
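
The off/on pair here (and around the long JSON asserts earlier) is scalastyle's escape hatch: checks are suspended between the two comments. The directive can also name a single rule id; the id below is taken from scalastyle's documented defaults and should be treated as an assumption:

// scalastyle:off line.size.limit
val oneLine = """{"type":"struct","fields":[{"name":"c1","type":"integer","nullable":false,"metadata":{}}]}"""
// scalastyle:on line.size.limit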


@@ -17,6 +17,7 @@
package org.apache.spark.sql.sources
import org.apache.spark.rdd.RDD
import org.apache.spark.sql._
import org.apache.spark.sql.types._
@@ -31,7 +32,7 @@ class DDLScanSource extends RelationProvider {
case class SimpleDDLScan(from: Int, to: Int)(@transient val sqlContext: SQLContext)
extends BaseRelation with TableScan {
override def schema =
override def schema: StructType =
StructType(Seq(
StructField("intType", IntegerType, nullable = false,
new MetadataBuilder().putString("comment", "test comment").build()),
@@ -57,8 +58,9 @@ case class SimpleDDLScan(from: Int, to: Int)(@transient val sqlContext: SQLConte
))
override def buildScan() = sqlContext.sparkContext.parallelize(from to to).
map(e => Row(s"people$e", e * 2))
override def buildScan(): RDD[Row] = {
sqlContext.sparkContext.parallelize(from to to).map(e => Row(s"people$e", e * 2))
}
}
class DDLTestSuite extends DataSourceTest {


@@ -19,6 +19,7 @@ package org.apache.spark.sql.sources
import scala.language.existentials
import org.apache.spark.rdd.RDD
import org.apache.spark.sql._
import org.apache.spark.sql.types._
@@ -41,7 +42,7 @@ case class SimpleFilteredScan(from: Int, to: Int)(@transient val sqlContext: SQL
StructField("b", IntegerType, nullable = false) ::
StructField("c", StringType, nullable = false) :: Nil)
override def buildScan(requiredColumns: Array[String], filters: Array[Filter]) = {
override def buildScan(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] = {
val rowBuilders = requiredColumns.map {
case "a" => (i: Int) => Seq(i)
case "b" => (i: Int) => Seq(i * 2)


@@ -19,6 +19,7 @@ package org.apache.spark.sql.sources
import scala.language.existentials
import org.apache.spark.rdd.RDD
import org.apache.spark.sql._
import org.apache.spark.sql.types._
@@ -34,12 +35,12 @@ case class SimplePrunedScan(from: Int, to: Int)(@transient val sqlContext: SQLCo
extends BaseRelation
with PrunedScan {
override def schema =
override def schema: StructType =
StructType(
StructField("a", IntegerType, nullable = false) ::
StructField("b", IntegerType, nullable = false) :: Nil)
override def buildScan(requiredColumns: Array[String]) = {
override def buildScan(requiredColumns: Array[String]): RDD[Row] = {
val rowBuilders = requiredColumns.map {
case "a" => (i: Int) => Seq(i)
case "b" => (i: Int) => Seq(i * 2)


@@ -19,6 +19,7 @@ package org.apache.spark.sql.sources
import java.sql.{Timestamp, Date}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql._
import org.apache.spark.sql.types._
@@ -35,10 +36,10 @@ class SimpleScanSource extends RelationProvider {
case class SimpleScan(from: Int, to: Int)(@transient val sqlContext: SQLContext)
extends BaseRelation with TableScan {
override def schema =
override def schema: StructType =
StructType(StructField("i", IntegerType, nullable = false) :: Nil)
override def buildScan() = sqlContext.sparkContext.parallelize(from to to).map(Row(_))
override def buildScan(): RDD[Row] = sqlContext.sparkContext.parallelize(from to to).map(Row(_))
}
class AllDataTypesScanSource extends SchemaRelationProvider {
@@ -57,9 +58,9 @@ case class AllDataTypesScan(
extends BaseRelation
with TableScan {
override def schema = userSpecifiedSchema
override def schema: StructType = userSpecifiedSchema
override def buildScan() = {
override def buildScan(): RDD[Row] = {
sqlContext.sparkContext.parallelize(from to to).map { i =>
Row(
s"str_$i",


@@ -136,7 +136,7 @@ class ErrorPositionSuite extends QueryTest with BeforeAndAfter {
* @param query the query to analyze
* @param token a unique token in the string that should be indicated by the exception
*/
def positionTest(name: String, query: String, token: String) = {
def positionTest(name: String, query: String, token: String): Unit = {
def parseTree =
Try(quietly(HiveQl.dumpTree(HiveQl.getAst(query)))).getOrElse("<failed to parse>")


@@ -116,21 +116,20 @@ class HiveInspectorSuite extends FunSuite with HiveInspectors {
}
def checkDataType(dt1: Seq[DataType], dt2: Seq[DataType]): Unit = {
dt1.zip(dt2).map {
case (dd1, dd2) =>
dt1.zip(dt2).foreach { case (dd1, dd2) =>
assert(dd1.getClass === dd2.getClass) // DecimalType doesn't have the default precision info
}
}
def checkValues(row1: Seq[Any], row2: Seq[Any]): Unit = {
row1.zip(row2).map {
case (r1, r2) => checkValue(r1, r2)
row1.zip(row2).foreach { case (r1, r2) =>
checkValue(r1, r2)
}
}
def checkValues(row1: Seq[Any], row2: Row): Unit = {
row1.zip(row2.toSeq).map {
case (r1, r2) => checkValue(r1, r2)
row1.zip(row2.toSeq).foreach { case (r1, r2) =>
checkValue(r1, r2)
}
}
@@ -141,7 +140,7 @@ class HiveInspectorSuite extends FunSuite with HiveInspectors {
assert(r1.compare(r2) === 0)
case (r1: Array[Byte], r2: Array[Byte])
if r1 != null && r2 != null && r1.length == r2.length =>
r1.zip(r2).map { case (b1, b2) => assert(b1 === b2) }
r1.zip(r2).foreach { case (b1, b2) => assert(b1 === b2) }
case (r1, r2) => assert(r1 === r2)
}
}
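
The map-to-foreach rewrites in this file are also rule-driven: map used only for its side effects allocates and discards a Seq[Unit] and misleads the reader into looking for a result. Sketch:

val pairs = Seq(1 -> 1, 2 -> 2)
// discouraged: the Seq[Unit] built by map is immediately thrown away
//   pairs.map { case (a, b) => assert(a == b) }
// preferred: foreach says only the effect matters
pairs.foreach { case (a, b) => assert(a == b) }
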
@@ -166,7 +165,8 @@ class HiveInspectorSuite extends FunSuite with HiveInspectors {
val constantData = constantExprs.map(_.eval())
val constantNullData = constantData.map(_ => null)
val constantWritableOIs = constantExprs.map(e => toWritableInspector(e.dataType))
val constantNullWritableOIs = constantExprs.map(e => toInspector(Literal.create(null, e.dataType)))
val constantNullWritableOIs =
constantExprs.map(e => toInspector(Literal.create(null, e.dataType)))
checkValues(constantData, constantData.zip(constantWritableOIs).map {
case (d, oi) => unwrap(wrap(d, oi), oi)
@@ -202,7 +202,8 @@ class HiveInspectorSuite extends FunSuite with HiveInspectors {
case (t, idx) => StructField(s"c_$idx", t)
})
checkValues(row, unwrap(wrap(Row.fromSeq(row), toInspector(dt)), toInspector(dt)).asInstanceOf[Row])
checkValues(row,
unwrap(wrap(Row.fromSeq(row), toInspector(dt)), toInspector(dt)).asInstanceOf[Row])
checkValue(null, unwrap(wrap(null, toInspector(dt)), toInspector(dt)))
}
@@ -212,8 +213,10 @@ class HiveInspectorSuite extends FunSuite with HiveInspectors {
val d = row(0) :: row(0) :: Nil
checkValue(d, unwrap(wrap(d, toInspector(dt)), toInspector(dt)))
checkValue(null, unwrap(wrap(null, toInspector(dt)), toInspector(dt)))
checkValue(d, unwrap(wrap(d, toInspector(Literal.create(d, dt))), toInspector(Literal.create(d, dt))))
checkValue(d, unwrap(wrap(null, toInspector(Literal.create(d, dt))), toInspector(Literal.create(d, dt))))
checkValue(d,
unwrap(wrap(d, toInspector(Literal.create(d, dt))), toInspector(Literal.create(d, dt))))
checkValue(d,
unwrap(wrap(null, toInspector(Literal.create(d, dt))), toInspector(Literal.create(d, dt))))
}
test("wrap / unwrap Map Type") {
@@ -222,7 +225,9 @@ class HiveInspectorSuite extends FunSuite with HiveInspectors {
val d = Map(row(0) -> row(1))
checkValue(d, unwrap(wrap(d, toInspector(dt)), toInspector(dt)))
checkValue(null, unwrap(wrap(null, toInspector(dt)), toInspector(dt)))
checkValue(d, unwrap(wrap(d, toInspector(Literal.create(d, dt))), toInspector(Literal.create(d, dt))))
checkValue(d, unwrap(wrap(null, toInspector(Literal.create(d, dt))), toInspector(Literal.create(d, dt))))
checkValue(d,
unwrap(wrap(d, toInspector(Literal.create(d, dt))), toInspector(Literal.create(d, dt))))
checkValue(d,
unwrap(wrap(null, toInspector(Literal.create(d, dt))), toInspector(Literal.create(d, dt))))
}
}


@@ -115,11 +115,36 @@ class InsertIntoHiveTableSuite extends QueryTest with BeforeAndAfter {
test("SPARK-4203:random partition directory order") {
sql("CREATE TABLE tmp_table (key int, value string)")
val tmpDir = Utils.createTempDir()
sql(s"CREATE TABLE table_with_partition(c1 string) PARTITIONED by (p1 string,p2 string,p3 string,p4 string,p5 string) location '${tmpDir.toURI.toString}' ")
sql("INSERT OVERWRITE TABLE table_with_partition partition (p1='a',p2='b',p3='c',p4='c',p5='1') SELECT 'blarr' FROM tmp_table")
sql("INSERT OVERWRITE TABLE table_with_partition partition (p1='a',p2='b',p3='c',p4='c',p5='2') SELECT 'blarr' FROM tmp_table")
sql("INSERT OVERWRITE TABLE table_with_partition partition (p1='a',p2='b',p3='c',p4='c',p5='3') SELECT 'blarr' FROM tmp_table")
sql("INSERT OVERWRITE TABLE table_with_partition partition (p1='a',p2='b',p3='c',p4='c',p5='4') SELECT 'blarr' FROM tmp_table")
sql(
s"""
|CREATE TABLE table_with_partition(c1 string)
|PARTITIONED by (p1 string,p2 string,p3 string,p4 string,p5 string)
|location '${tmpDir.toURI.toString}'
""".stripMargin)
sql(
"""
|INSERT OVERWRITE TABLE table_with_partition
|partition (p1='a',p2='b',p3='c',p4='c',p5='1')
|SELECT 'blarr' FROM tmp_table
""".stripMargin)
sql(
"""
|INSERT OVERWRITE TABLE table_with_partition
|partition (p1='a',p2='b',p3='c',p4='c',p5='2')
|SELECT 'blarr' FROM tmp_table
""".stripMargin)
sql(
"""
|INSERT OVERWRITE TABLE table_with_partition
|partition (p1='a',p2='b',p3='c',p4='c',p5='3')
|SELECT 'blarr' FROM tmp_table
""".stripMargin)
sql(
"""
|INSERT OVERWRITE TABLE table_with_partition
|partition (p1='a',p2='b',p3='c',p4='c',p5='4')
|SELECT 'blarr' FROM tmp_table
""".stripMargin)
def listFolders(path: File, acc: List[String]): List[List[String]] = {
val dir = path.listFiles()
val folders = dir.filter(_.isDirectory).toList
@@ -196,34 +221,42 @@ class InsertIntoHiveTableSuite extends QueryTest with BeforeAndAfter {
testData.registerTempTable("testData")
val testDatawithNull = TestHive.sparkContext.parallelize(
(1 to 10).map(i => ThreeCloumntable(i, i.toString,null))).toDF()
(1 to 10).map(i => ThreeCloumntable(i, i.toString, null))).toDF()
val tmpDir = Utils.createTempDir()
sql(s"CREATE TABLE table_with_partition(key int,value string) PARTITIONED by (ds string) location '${tmpDir.toURI.toString}' ")
sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='1') SELECT key,value FROM testData")
sql(
s"""
|CREATE TABLE table_with_partition(key int,value string)
|PARTITIONED by (ds string) location '${tmpDir.toURI.toString}'
""".stripMargin)
sql(
"""
|INSERT OVERWRITE TABLE table_with_partition
|partition (ds='1') SELECT key,value FROM testData
""".stripMargin)
// test schema the same between partition and table
sql("ALTER TABLE table_with_partition CHANGE COLUMN key key BIGINT")
checkAnswer(sql("select key,value from table_with_partition where ds='1' "),
testData.collect.toSeq
testData.collect().toSeq
)
// test difference type of field
sql("ALTER TABLE table_with_partition CHANGE COLUMN key key BIGINT")
checkAnswer(sql("select key,value from table_with_partition where ds='1' "),
testData.collect.toSeq
testData.collect().toSeq
)
// add column to table
sql("ALTER TABLE table_with_partition ADD COLUMNS(key1 string)")
checkAnswer(sql("select key,value,key1 from table_with_partition where ds='1' "),
testDatawithNull.collect.toSeq
testDatawithNull.collect().toSeq
)
// change column name to table
sql("ALTER TABLE table_with_partition CHANGE COLUMN key keynew BIGINT")
checkAnswer(sql("select keynew,value from table_with_partition where ds='1' "),
testData.collect.toSeq
testData.collect().toSeq
)
sql("DROP TABLE table_with_partition")


@@ -142,7 +142,7 @@ class StatisticsSuite extends QueryTest with BeforeAndAfterAll {
after: () => Unit,
query: String,
expectedAnswer: Seq[Row],
ct: ClassTag[_]) = {
ct: ClassTag[_]): Unit = {
before()
var df = sql(query)


@@ -28,6 +28,7 @@ import org.apache.spark.sql.hive.test.TestHive._
class BigDataBenchmarkSuite extends HiveComparisonTest {
val testDataDirectory = new File("target" + File.separator + "big-data-benchmark-testdata")
val userVisitPath = new File(testDataDirectory, "uservisits").getCanonicalPath
val testTables = Seq(
TestTable(
"rankings",
@@ -63,7 +64,7 @@ class BigDataBenchmarkSuite extends HiveComparisonTest {
| searchWord STRING,
| duration INT)
| ROW FORMAT DELIMITED FIELDS TERMINATED BY ","
| STORED AS TEXTFILE LOCATION "${new File(testDataDirectory, "uservisits").getCanonicalPath}"
| STORED AS TEXTFILE LOCATION "$userVisitPath"
""".stripMargin.cmd),
TestTable(
"documents",
@@ -83,7 +84,10 @@ class BigDataBenchmarkSuite extends HiveComparisonTest {
"SELECT pageURL, pageRank FROM rankings WHERE pageRank > 1")
createQueryTest("query2",
"SELECT SUBSTR(sourceIP, 1, 10), SUM(adRevenue) FROM uservisits GROUP BY SUBSTR(sourceIP, 1, 10)")
"""
|SELECT SUBSTR(sourceIP, 1, 10), SUM(adRevenue) FROM uservisits
|GROUP BY SUBSTR(sourceIP, 1, 10)
""".stripMargin)
createQueryTest("query3",
"""
@@ -113,8 +117,8 @@ class BigDataBenchmarkSuite extends HiveComparisonTest {
|CREATE TABLE url_counts_total AS
| SELECT SUM(count) AS totalCount, destpage
| FROM url_counts_partial GROUP BY destpage
|-- The following queries run, but generate different results in HIVE likely because the UDF is not deterministic
|-- given different input splits.
|-- The following queries run, but generate different results in HIVE
|-- likely because the UDF is not deterministic given different input splits.
|-- SELECT CAST(SUM(count) AS INT) FROM url_counts_partial
|-- SELECT COUNT(*) FROM url_counts_partial
|-- SELECT * FROM url_counts_partial


@@ -255,8 +255,9 @@ abstract class HiveComparisonTest
.filterNot(_ contains "hive.outerjoin.supports.filters")
.filterNot(_ contains "hive.exec.post.hooks")
if (allQueries != queryList)
if (allQueries != queryList) {
logWarning(s"Simplifications made on unsupported operations for test $testCaseName")
}
lazy val consoleTestCase = {
val quotes = "\"\"\""
@@ -305,13 +306,16 @@ abstract class HiveComparisonTest
try {
// Hooks often break the harness and don't really affect our test anyway, don't
// even try running them.
if (installHooksCommand.findAllMatchIn(queryString).nonEmpty)
if (installHooksCommand.findAllMatchIn(queryString).nonEmpty) {
sys.error("hive exec hooks not supported for tests.")
}
logWarning(s"Running query ${i+1}/${queryList.size} with hive.")
logWarning(s"Running query ${i + 1}/${queryList.size} with hive.")
// Analyze the query with catalyst to ensure test tables are loaded.
val answer = hiveQuery.analyzed match {
case _: ExplainCommand => Nil // No need to execute EXPLAIN queries as we don't check the output.
case _: ExplainCommand =>
// No need to execute EXPLAIN queries as we don't check the output.
Nil
case _ => TestHive.runSqlHive(queryString)
}
@@ -394,21 +398,24 @@ abstract class HiveComparisonTest
case tf: org.scalatest.exceptions.TestFailedException => throw tf
case originalException: Exception =>
if (System.getProperty("spark.hive.canarytest") != null) {
// When we encounter an error we check to see if the environment is still okay by running a simple query.
// If this fails then we halt testing since something must have gone seriously wrong.
// When we encounter an error we check to see if the environment is still
// okay by running a simple query. If this fails then we halt testing since
// something must have gone seriously wrong.
try {
new TestHive.HiveQLQueryExecution("SELECT key FROM src").stringResult()
TestHive.runSqlHive("SELECT key FROM src")
} catch {
case e: Exception =>
logError(s"FATAL ERROR: Canary query threw $e This implies that the testing environment has likely been corrupted.")
// The testing setup traps exits so wait here for a long time so the developer can see when things started
// to go wrong.
logError(s"FATAL ERROR: Canary query threw $e This implies that the " +
"testing environment has likely been corrupted.")
// The testing setup traps exits so wait here for a long time so the developer
// can see when things started to go wrong.
Thread.sleep(1000000)
}
}
// If the canary query didn't fail then the environment is still okay, so just throw the original exception.
// If the canary query didn't fail then the environment is still okay,
// so just throw the original exception.
throw originalException
}
}


@@ -24,8 +24,9 @@ import org.apache.spark.sql.catalyst.util._
/**
* A framework for running the query tests that are listed as a set of text files.
*
* TestSuites that derive from this class must provide a map of testCaseName -> testCaseFiles that should be included.
* Additionally, there is support for whitelisting and blacklisting tests as development progresses.
* TestSuites that derive from this class must provide a map of testCaseName -> testCaseFiles
* that should be included. Additionally, there is support for whitelisting and blacklisting
* tests as development progresses.
*/
abstract class HiveQueryFileTest extends HiveComparisonTest {
/** A list of tests deemed out of scope and thus completely disregarded */
@@ -54,15 +55,17 @@ abstract class HiveQueryFileTest extends HiveComparisonTest {
case (testCaseName, testCaseFile) =>
if (blackList.map(_.r.pattern.matcher(testCaseName).matches()).reduceLeft(_||_)) {
logDebug(s"Blacklisted test skipped $testCaseName")
} else if (realWhiteList.map(_.r.pattern.matcher(testCaseName).matches()).reduceLeft(_||_) || runAll) {
} else if (realWhiteList.map(_.r.pattern.matcher(testCaseName).matches()).reduceLeft(_||_) ||
runAll) {
// Build a test case and submit it to scala test framework...
val queriesString = fileToString(testCaseFile)
createQueryTest(testCaseName, queriesString)
} else {
// Only output warnings for the built in whitelist as this clutters the output when the user
// trying to execute a single test from the commandline.
if(System.getProperty(whiteListProperty) == null && !runAll)
if (System.getProperty(whiteListProperty) == null && !runAll) {
ignore(testCaseName) {}
}
}
}
}


@@ -37,7 +37,8 @@ import org.apache.spark.sql.hive.test.TestHive._
case class TestData(a: Int, b: String)
/**
* A set of test cases expressed in Hive QL that are not covered by the tests included in the hive distribution.
* A set of test cases expressed in Hive QL that are not covered by the tests
* included in the Hive distribution.
*/
class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
private val originalTimeZone = TimeZone.getDefault
@ -237,7 +238,8 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
}
createQueryTest("modulus",
"SELECT 11 % 10, IF((101.1 % 100.0) BETWEEN 1.01 AND 1.11, \"true\", \"false\"), (101 / 2) % 10 FROM src LIMIT 1")
"SELECT 11 % 10, IF((101.1 % 100.0) BETWEEN 1.01 AND 1.11, \"true\", \"false\"), " +
"(101 / 2) % 10 FROM src LIMIT 1")
test("Query expressed in SQL") {
setConf("spark.sql.dialect", "sql")
@ -309,7 +311,8 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
"SELECT * FROM src a JOIN src b ON a.key = b.key")
createQueryTest("small.cartesian",
"SELECT a.key, b.key FROM (SELECT key FROM src WHERE key < 1) a JOIN (SELECT key FROM src WHERE key = 2) b")
"SELECT a.key, b.key FROM (SELECT key FROM src WHERE key < 1) a JOIN " +
"(SELECT key FROM src WHERE key = 2) b")
createQueryTest("length.udf",
"SELECT length(\"test\") FROM src LIMIT 1")
@ -457,6 +460,7 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
createQueryTest("lateral view3",
"FROM src SELECT key, D.* lateral view explode(array(key+3, key+4)) D as CX")
// scalastyle:off
createQueryTest("lateral view4",
"""
|create table src_lv1 (key string, value string);
@ -466,6 +470,7 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
|insert overwrite table src_lv1 SELECT key, D.* lateral view explode(array(key+3, key+4)) D as CX
|insert overwrite table src_lv2 SELECT key, D.* lateral view explode(array(key+3, key+4)) D as CX
""".stripMargin)
// scalastyle:on
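
The `scalastyle:off`/`scalastyle:on` pair above is the escape hatch for lines that must exceed the limit once the checker is enabled for test code: everything between the two directives is skipped. A minimal sketch (scalastyle also accepts a rule id after `off` to scope the suppression, if memory serves):

// scalastyle:off
val longInsert = "insert overwrite table src_lv1 SELECT key, D.* lateral view explode(array(key+3, key+4)) D as CX"
// scalastyle:on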
createQueryTest("lateral view5",
"FROM src SELECT explode(array(key+3, key+4))")
@ -584,7 +589,7 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
}
}
def isExplanation(result: DataFrame) = {
def isExplanation(result: DataFrame): Boolean = {
val explanation = result.select('plan).collect().map { case Row(plan: String) => plan }
explanation.contains("== Physical Plan ==")
}
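
With the explicit `Boolean` result type added above, the helper's contract is visible at the call site. The same check restated against in-memory rows, which is roughly what `result.select('plan).collect()` yields in the real test:

import org.apache.spark.sql.Row

val collected: Array[Row] = Array(Row("== Physical Plan =="), Row("Project [key#0]"))
val explanation = collected.map { case Row(plan: String) => plan }
val isExplanation = explanation.contains("== Physical Plan ==") // true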

View file

@ -25,7 +25,8 @@ case class Nested(a: Int, B: Int)
case class Data(a: Int, B: Int, n: Nested, nestedArray: Seq[Nested])
/**
* A set of test cases expressed in Hive QL that are not covered by the tests included in the hive distribution.
* A set of test cases expressed in Hive QL that are not covered by the tests
* included in the Hive distribution.
*/
class HiveResolutionSuite extends HiveComparisonTest {

View file

@ -26,8 +26,9 @@ import org.apache.spark.sql.hive.test.TestHive
*/
class HiveSerDeSuite extends HiveComparisonTest with BeforeAndAfterAll {
override def beforeAll() = {
override def beforeAll(): Unit = {
TestHive.cacheTables = false
super.beforeAll()
}
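
The same one-line fix recurs across this commit: public and overridden `def`s get an explicit result type instead of an inferred one. A minimal before/after sketch, assuming a ScalaTest-like `beforeAll` hook:

trait Hook { def beforeAll(): Unit = () }

class StyledSuite extends Hook {
  // Before: `override def beforeAll() = { ... }` leaves the result type inferred.
  // After: the explicit annotation satisfies the style checker.
  override def beforeAll(): Unit = {
    println("disable table caching")
    super.beforeAll()
  }
}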
createQueryTest(

View file

@ -35,8 +35,10 @@ class HiveTypeCoercionSuite extends HiveComparisonTest {
val nullVal = "null"
baseTypes.init.foreach { i =>
createQueryTest(s"case when then $i else $nullVal end ", s"SELECT case when true then $i else $nullVal end FROM src limit 1")
createQueryTest(s"case when then $nullVal else $i end ", s"SELECT case when true then $nullVal else $i end FROM src limit 1")
createQueryTest(s"case when then $i else $nullVal end ",
s"SELECT case when true then $i else $nullVal end FROM src limit 1")
createQueryTest(s"case when then $nullVal else $i end ",
s"SELECT case when true then $nullVal else $i end FROM src limit 1")
}
test("[SPARK-2210] boolean cast on boolean value should be removed") {

View file

@ -123,9 +123,10 @@ class HiveUdfSuite extends QueryTest {
IntegerCaseClass(1) :: IntegerCaseClass(2) :: Nil).toDF()
testData.registerTempTable("integerTable")
sql(s"CREATE TEMPORARY FUNCTION testUDFIntegerToString AS '${classOf[UDFIntegerToString].getName}'")
val udfName = classOf[UDFIntegerToString].getName
sql(s"CREATE TEMPORARY FUNCTION testUDFIntegerToString AS '$udfName'")
checkAnswer(
sql("SELECT testUDFIntegerToString(i) FROM integerTable"), //.collect(),
sql("SELECT testUDFIntegerToString(i) FROM integerTable"),
Seq(Row("1"), Row("2")))
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFIntegerToString")
@ -141,7 +142,7 @@ class HiveUdfSuite extends QueryTest {
sql(s"CREATE TEMPORARY FUNCTION testUDFListListInt AS '${classOf[UDFListListInt].getName}'")
checkAnswer(
sql("SELECT testUDFListListInt(lli) FROM listListIntTable"), //.collect(),
sql("SELECT testUDFListListInt(lli) FROM listListIntTable"),
Seq(Row(0), Row(2), Row(13)))
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFListListInt")
@ -156,7 +157,7 @@ class HiveUdfSuite extends QueryTest {
sql(s"CREATE TEMPORARY FUNCTION testUDFListString AS '${classOf[UDFListString].getName}'")
checkAnswer(
sql("SELECT testUDFListString(l) FROM listStringTable"), //.collect(),
sql("SELECT testUDFListString(l) FROM listStringTable"),
Seq(Row("a,b,c"), Row("d,e")))
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFListString")
@ -170,7 +171,7 @@ class HiveUdfSuite extends QueryTest {
sql(s"CREATE TEMPORARY FUNCTION testStringStringUdf AS '${classOf[UDFStringString].getName}'")
checkAnswer(
sql("SELECT testStringStringUdf(\"hello\", s) FROM stringTable"), //.collect(),
sql("SELECT testStringStringUdf(\"hello\", s) FROM stringTable"),
Seq(Row("hello world"), Row("hello goodbye")))
sql("DROP TEMPORARY FUNCTION IF EXISTS testStringStringUdf")
@ -187,7 +188,7 @@ class HiveUdfSuite extends QueryTest {
sql(s"CREATE TEMPORARY FUNCTION testUDFTwoListList AS '${classOf[UDFTwoListList].getName}'")
checkAnswer(
sql("SELECT testUDFTwoListList(lli, lli) FROM TwoListTable"), //.collect(),
sql("SELECT testUDFTwoListList(lli, lli) FROM TwoListTable"),
Seq(Row("0, 0"), Row("2, 2"), Row("13, 13")))
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFTwoListList")
@ -247,7 +248,8 @@ class PairUdf extends GenericUDF {
override def initialize(p1: Array[ObjectInspector]): ObjectInspector =
ObjectInspectorFactory.getStandardStructObjectInspector(
Seq("id", "value"),
Seq(PrimitiveObjectInspectorFactory.javaIntObjectInspector, PrimitiveObjectInspectorFactory.javaIntObjectInspector)
Seq(PrimitiveObjectInspectorFactory.javaIntObjectInspector,
PrimitiveObjectInspectorFactory.javaIntObjectInspector)
)
override def evaluate(args: Array[DeferredObject]): AnyRef = {
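
For context on the hunk above: `PairUdf.initialize` declares a two-field struct output, which is why two object inspectors are wrapped there. A self-contained sketch of the same shape, assuming Hive's serde2 API is on the classpath; the class and its behavior are illustrative only.

import java.util.Arrays.asList

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject
import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspector, ObjectInspectorFactory}
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory

// Declares struct<id:int, value:int> as its return type and echoes the first
// argument into both fields.
class EchoPairUdf extends GenericUDF {
  override def getDisplayString(children: Array[String]): String = "echo_pair"

  override def initialize(args: Array[ObjectInspector]): ObjectInspector =
    ObjectInspectorFactory.getStandardStructObjectInspector(
      asList("id", "value"),
      asList[ObjectInspector](
        PrimitiveObjectInspectorFactory.javaIntObjectInspector,
        PrimitiveObjectInspectorFactory.javaIntObjectInspector))

  override def evaluate(args: Array[DeferredObject]): AnyRef = {
    val v = args(0).get()
    asList(v, v) // standard struct values are represented as a java.util.List
  }
}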

View file

@ -143,7 +143,7 @@ class PruningSuite extends HiveComparisonTest with BeforeAndAfter {
sql: String,
expectedOutputColumns: Seq[String],
expectedScannedColumns: Seq[String],
expectedPartValues: Seq[Seq[String]]) = {
expectedPartValues: Seq[Seq[String]]): Unit = {
test(s"$testCaseName - pruning test") {
val plan = new TestHive.HiveQLQueryExecution(sql).executedPlan
val actualOutputColumns = plan.output.map(_.name)

View file

@ -422,7 +422,7 @@ class SQLQuerySuite extends QueryTest {
}
test("resolve udtf with single alias") {
val rdd = sparkContext.makeRDD((1 to 5).map(i => s"""{"a":[$i, ${i+1}]}"""))
val rdd = sparkContext.makeRDD((1 to 5).map(i => s"""{"a":[$i, ${i + 1}]}"""))
jsonRDD(rdd).registerTempTable("data")
val df = sql("SELECT explode(a) AS val FROM data")
val col = df("val")
@ -435,7 +435,7 @@ class SQLQuerySuite extends QueryTest {
// is not in a valid state (cannot be executed). Because of this bug, the analysis rule of
// PreInsertionCasts will actually start to work before ImplicitGenerate and then
// generates an invalid query plan.
val rdd = sparkContext.makeRDD((1 to 5).map(i => s"""{"a":[$i, ${i+1}]}"""))
val rdd = sparkContext.makeRDD((1 to 5).map(i => s"""{"a":[$i, ${i + 1}]}"""))
jsonRDD(rdd).registerTempTable("data")
val originalConf = getConf("spark.sql.hive.convertCTAS", "false")
setConf("spark.sql.hive.convertCTAS", "false")

View file

@ -1,4 +1,3 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@ -887,7 +886,11 @@ abstract class ParquetPartitioningTest extends QueryTest with BeforeAndAfterAll
test(s"SPARK-5775 read struct from $table") {
checkAnswer(
sql(s"SELECT p, structField.intStructField, structField.stringStructField FROM $table WHERE p = 1"),
sql(
s"""
|SELECT p, structField.intStructField, structField.stringStructField
|FROM $table WHERE p = 1
""".stripMargin),
(1 to 10).map(i => Row(1, i, f"${i}_string")))
}
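
The second wrapping idiom in this commit, used in the hunk above: a margin-stripped multi-line string in place of one over-long SQL line. In isolation, with a placeholder table name standing in for the suite's `$table`:

val table = "partitioned_parquet" // placeholder for the suite's $table
val query =
  s"""
    |SELECT p, structField.intStructField, structField.stringStructField
    |FROM $table WHERE p = 1
  """.stripMargin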