[SPARK-7927] whitespace fixes for Hive and ThriftServer.

So we can enable a whitespace enforcement rule in the style checker to save code review time.
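
For context, the enforcement rule referred to here lives in Spark's scalastyle-config.xml. The snippet below is only an illustrative sketch using standard Scalastyle checkers (EnsureSingleSpaceAfterTokenChecker and WhitespaceEndOfLineChecker); the exact rule set and token list that were eventually enabled landed in a separate change and may differ:

<!-- Illustrative sketch, not the configuration committed to Spark: standard
     Scalastyle checkers covering the conventions fixed in this patch. -->
<check level="error" class="org.scalastyle.scalariform.EnsureSingleSpaceAfterTokenChecker" enabled="true">
  <parameters>
    <!-- e.g. require a single space after ':' in type annotations, after ',', and after 'if' -->
    <parameter name="tokens">COLON, COMMA, IF</parameter>
  </parameters>
</check>
<check level="error" class="org.scalastyle.file.WhitespaceEndOfLineChecker" enabled="true"/>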

Author: Reynold Xin <rxin@databricks.com>

Closes #6478 from rxin/whitespace-hive and squashes the following commits:

e01b0e0 [Reynold Xin] Fixed tests.
a3bba22 [Reynold Xin] [SPARK-7927] whitespace fixes for Hive and ThriftServer.

(cherry picked from commit ee6a0e12fb)
Signed-off-by: Reynold Xin <rxin@databricks.com>
Reynold Xin 2015-05-28 18:08:56 -07:00
parent f4b135337c
commit 3b38c06f0d
14 changed files with 43 additions and 39 deletions

@@ -43,7 +43,7 @@ import org.apache.spark.util.Utils
private[hive] object SparkSQLCLIDriver {
private var prompt = "spark-sql"
private var continuedPrompt = "".padTo(prompt.length, ' ')
-private var transport:TSocket = _
+private var transport: TSocket = _
installSignalHandler()
@@ -276,13 +276,13 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
driver.init()
val out = sessionState.out
-val start:Long = System.currentTimeMillis()
+val start: Long = System.currentTimeMillis()
if (sessionState.getIsVerbose) {
out.println(cmd)
}
val rc = driver.run(cmd)
val end = System.currentTimeMillis()
-val timeTaken:Double = (end - start) / 1000.0
+val timeTaken: Double = (end - start) / 1000.0
ret = rc.getResponseCode
if (ret != 0) {
@@ -310,7 +310,7 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
res.clear()
}
} catch {
-case e:IOException =>
+case e: IOException =>
console.printError(
s"""Failed with exception ${e.getClass.getName}: ${e.getMessage}
|${org.apache.hadoop.util.StringUtils.stringifyException(e)}

@@ -77,7 +77,7 @@ private[ui] class ThriftServerPage(parent: ThriftServerTab) extends WebUIPage(""
[{id}]
</a>
}
-val detail = if(info.state == ExecutionState.FAILED) info.detail else info.executePlan
+val detail = if (info.state == ExecutionState.FAILED) info.detail else info.executePlan
<tr>
<td>{info.userName}</td>
<td>
@@ -85,7 +85,7 @@ private[ui] class ThriftServerPage(parent: ThriftServerTab) extends WebUIPage(""
</td>
<td>{info.groupId}</td>
<td>{formatDate(info.startTimestamp)}</td>
-<td>{if(info.finishTimestamp > 0) formatDate(info.finishTimestamp)}</td>
+<td>{if (info.finishTimestamp > 0) formatDate(info.finishTimestamp)}</td>
<td>{formatDurationOption(Some(info.totalTime))}</td>
<td>{info.statement}</td>
<td>{info.state}</td>
@@ -150,7 +150,7 @@ private[ui] class ThriftServerPage(parent: ThriftServerTab) extends WebUIPage(""
<td> {session.ip} </td>
<td> <a href={sessionLink}> {session.sessionId} </a> </td>
<td> {formatDate(session.startTimestamp)} </td>
-<td> {if(session.finishTimestamp > 0) formatDate(session.finishTimestamp)} </td>
+<td> {if (session.finishTimestamp > 0) formatDate(session.finishTimestamp)} </td>
<td> {formatDurationOption(Some(session.totalTime))} </td>
<td> {session.totalExecution.toString} </td>
</tr>

@@ -87,7 +87,7 @@ private[ui] class ThriftServerSessionPage(parent: ThriftServerTab)
[{id}]
</a>
}
-val detail = if(info.state == ExecutionState.FAILED) info.detail else info.executePlan
+val detail = if (info.state == ExecutionState.FAILED) info.detail else info.executePlan
<tr>
<td>{info.userName}</td>
<td>

@@ -73,7 +73,7 @@ class UISeleniumSuite
}
ignore("thrift server ui test") {
-withJdbcStatement(statement =>{
+withJdbcStatement { statement =>
val baseURL = s"http://localhost:$uiPort"
val queries = Seq(
@@ -97,6 +97,6 @@ class UISeleniumSuite
findAll(cssSelector("""ul table tbody tr td""")).map(_.text).toList should contain (line)
}
}
-})
+}
}
}

@@ -29,10 +29,10 @@ import org.apache.spark.sql.hive.execution.{AddJar, AddFile, HiveNativeCommand}
private[hive] class ExtendedHiveQlParser extends AbstractSparkSQLParser {
// Keyword is a convention with AbstractSparkSQLParser, which will scan all of the `Keyword`
// properties via reflection the class in runtime for constructing the SqlLexical object
-protected val ADD  = Keyword("ADD")
-protected val DFS  = Keyword("DFS")
+protected val ADD = Keyword("ADD")
+protected val DFS = Keyword("DFS")
protected val FILE = Keyword("FILE")
-protected val JAR  = Keyword("JAR")
+protected val JAR = Keyword("JAR")
protected lazy val start: Parser[LogicalPlan] = dfs | addJar | addFile | hiveQl

@@ -530,7 +530,7 @@ private[hive] object HiveContext {
val propMap: HashMap[String, String] = HashMap()
// We have to mask all properties in hive-site.xml that relates to metastore data source
// as we used a local metastore here.
-HiveConf.ConfVars.values().foreach { confvar =>
+HiveConf.ConfVars.values().foreach { confvar =>
if (confvar.varname.contains("datanucleus") || confvar.varname.contains("jdo")) {
propMap.put(confvar.varname, confvar.defaultVal)
}
@@ -553,7 +553,7 @@
}.mkString("{", ",", "}")
case (seq: Seq[_], ArrayType(typ, _)) =>
seq.map(v => (v, typ)).map(toHiveStructString).mkString("[", ",", "]")
-case (map: Map[_,_], MapType(kType, vType, _)) =>
+case (map: Map[_, _], MapType(kType, vType, _)) =>
map.map {
case (key, value) =>
toHiveStructString((key, kType)) + ":" + toHiveStructString((value, vType))

@@ -335,7 +335,7 @@ private[hive] trait HiveInspectors {
val allRefs = si.getAllStructFieldRefs
new GenericRow(
allRefs.map(r =>
-unwrap(si.getStructFieldData(data,r), r.getFieldObjectInspector)).toArray)
+unwrap(si.getStructFieldData(data, r), r.getFieldObjectInspector)).toArray)
}
@@ -561,8 +561,8 @@
case DecimalType() => PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector
case StructType(fields) =>
ObjectInspectorFactory.getStandardStructObjectInspector(
-java.util.Arrays.asList(fields.map(f => f.name) :_*),
-java.util.Arrays.asList(fields.map(f => toInspector(f.dataType)) :_*))
+java.util.Arrays.asList(fields.map(f => f.name) : _*),
+java.util.Arrays.asList(fields.map(f => toInspector(f.dataType)) : _*))
}
/**
@@ -677,8 +677,8 @@
getListTypeInfo(elemType.toTypeInfo)
case StructType(fields) =>
getStructTypeInfo(
-java.util.Arrays.asList(fields.map(_.name) :_*),
-java.util.Arrays.asList(fields.map(_.dataType.toTypeInfo) :_*))
+java.util.Arrays.asList(fields.map(_.name) : _*),
+java.util.Arrays.asList(fields.map(_.dataType.toTypeInfo) : _*))
case MapType(keyType, valueType, _) =>
getMapTypeInfo(keyType.toTypeInfo, valueType.toTypeInfo)
case BinaryType => binaryTypeInfo

@@ -546,13 +546,17 @@ private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: Hive
* UNIMPLEMENTED: It needs to be decided how we will persist in-memory tables to the metastore.
* For now, if this functionality is desired mix in the in-memory [[OverrideCatalog]].
*/
-override def registerTable(tableIdentifier: Seq[String], plan: LogicalPlan): Unit = ???
+override def registerTable(tableIdentifier: Seq[String], plan: LogicalPlan): Unit = {
+throw new UnsupportedOperationException
+}
/**
* UNIMPLEMENTED: It needs to be decided how we will persist in-memory tables to the metastore.
* For now, if this functionality is desired mix in the in-memory [[OverrideCatalog]].
*/
-override def unregisterTable(tableIdentifier: Seq[String]): Unit = ???
+override def unregisterTable(tableIdentifier: Seq[String]): Unit = {
+throw new UnsupportedOperationException
+}
override def unregisterAllTables(): Unit = {}
}
@@ -725,7 +729,7 @@ private[hive] case class MetastoreRelation
val output = attributes ++ partitionKeys
/** An attribute map that can be used to lookup original attributes based on expression id. */
-val attributeMap = AttributeMap(output.map(o => (o,o)))
+val attributeMap = AttributeMap(output.map(o => (o, o)))
/** An attribute map for determining the ordinal for non-partition columns. */
val columnOrdinals = AttributeMap(attributes.zipWithIndex)

@@ -665,7 +665,7 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
HiveColumn(field.getName, field.getType, field.getComment)
})
}
case Token("TOK_TABLEROWFORMAT", Token("TOK_SERDEPROPS", child :: Nil) :: Nil)=>
case Token("TOK_TABLEROWFORMAT", Token("TOK_SERDEPROPS", child :: Nil) :: Nil) =>
val serdeParams = new java.util.HashMap[String, String]()
child match {
case Token("TOK_TABLEROWFORMATFIELD", rowChild1 :: rowChild2) =>
@@ -775,7 +775,7 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
// Support "TRUNCATE TABLE table_name [PARTITION partition_spec]"
case Token("TOK_TRUNCATETABLE",
Token("TOK_TABLE_PARTITION",table)::Nil) => NativePlaceholder
Token("TOK_TABLE_PARTITION", table) :: Nil) => NativePlaceholder
case Token("TOK_QUERY", queryArgs)
if Seq("TOK_FROM", "TOK_INSERT").contains(queryArgs.head.getText) =>
@@ -1151,7 +1151,7 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
case Seq(false, false) => Inner
}.toBuffer
-val joinedTables = tables.reduceLeft(Join(_,_, Inner, None))
+val joinedTables = tables.reduceLeft(Join(_, _, Inner, None))
// Must be transform down.
val joinedResult = joinedTables transform {
@@ -1171,7 +1171,8 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
// worth the number of hacks that will be required to implement it. Namely, we need to add
// some sort of mapped star expansion that would expand all child output row to be similarly
// named output expressions where some aggregate expression has been applied (i.e. First).
-??? // Aggregate(groups, Star(None, First(_)) :: Nil, joinedResult)
+// Aggregate(groups, Star(None, First(_)) :: Nil, joinedResult)
+throw new UnsupportedOperationException
case Token(allJoinTokens(joinToken),
relation1 ::

@@ -194,10 +194,9 @@ case class InsertIntoHiveTable(
if (partition.nonEmpty) {
// loadPartition call orders directories created on the iteration order of the this map
-val orderedPartitionSpec = new util.LinkedHashMap[String,String]()
-table.hiveQlTable.getPartCols().foreach{
-entry=>
-orderedPartitionSpec.put(entry.getName,partitionSpec.get(entry.getName).getOrElse(""))
+val orderedPartitionSpec = new util.LinkedHashMap[String, String]()
+table.hiveQlTable.getPartCols().foreach { entry =>
+orderedPartitionSpec.put(entry.getName, partitionSpec.get(entry.getName).getOrElse(""))
}
val partVals = MetaStoreUtils.getPvals(table.hiveQlTable.getPartCols, partitionSpec)

@@ -216,7 +216,7 @@ case class HiveScriptIOSchema (
val columnTypes = attrs.map {
case aref: AttributeReference => aref.dataType
case e: NamedExpression => e.dataType
-case _ => null
+case _ => null
}
(columns, columnTypes)

@@ -316,7 +316,7 @@ private[hive] case class HiveWindowFunction(
// The object inspector of values returned from the Hive window function.
@transient
-protected lazy val returnInspector = {
+protected lazy val returnInspector = {
evaluator.init(GenericUDAFEvaluator.Mode.COMPLETE, inputInspectors)
}
@@ -413,7 +413,7 @@ private[hive] case class HiveGenericUdaf(
protected lazy val resolver: AbstractGenericUDAFResolver = funcWrapper.createFunction()
@transient
-protected lazy val objectInspector = {
+protected lazy val objectInspector = {
val parameterInfo = new SimpleGenericUDAFParameterInfo(inspectors.toArray, false, false)
resolver.getEvaluator(parameterInfo)
.init(GenericUDAFEvaluator.Mode.COMPLETE, inspectors.toArray)
@@ -446,7 +446,7 @@ private[hive] case class HiveUdaf(
new GenericUDAFBridge(funcWrapper.createFunction())
@transient
-protected lazy val objectInspector = {
+protected lazy val objectInspector = {
val parameterInfo = new SimpleGenericUDAFParameterInfo(inspectors.toArray, false, false)
resolver.getEvaluator(parameterInfo)
.init(GenericUDAFEvaluator.Mode.COMPLETE, inspectors.toArray)

@@ -71,7 +71,7 @@ private[hive] class SparkHiveWriterContainer(
@transient protected lazy val jobContext = newJobContext(conf.value, jID.value)
@transient private lazy val taskContext = newTaskAttemptContext(conf.value, taID.value)
@transient private lazy val outputFormat =
-conf.value.getOutputFormat.asInstanceOf[HiveOutputFormat[AnyRef,Writable]]
+conf.value.getOutputFormat.asInstanceOf[HiveOutputFormat[AnyRef, Writable]]
def driverSideSetup() {
setIDs(0, 0, 0)

@@ -189,7 +189,7 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
}
}
-case class TestTable(name: String, commands: (()=>Unit)*)
+case class TestTable(name: String, commands: (() => Unit)*)
protected[hive] implicit class SqlCmd(sql: String) {
def cmd: () => Unit = {
@@ -253,8 +253,8 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
| 'serialization.format'='${classOf[TBinaryProtocol].getName}'
|)
|STORED AS
-|INPUTFORMAT '${classOf[SequenceFileInputFormat[_,_]].getName}'
-|OUTPUTFORMAT '${classOf[SequenceFileOutputFormat[_,_]].getName}'
+|INPUTFORMAT '${classOf[SequenceFileInputFormat[_, _]].getName}'
+|OUTPUTFORMAT '${classOf[SequenceFileOutputFormat[_, _]].getName}'
""".stripMargin)
runSqlHive(