Merge pull request #327 from lucarosellini/master
Added ‘-i’ command line option to Spark REPL We had to create a new implementation of both scala.tools.nsc.CompilerCommand and scala.tools.nsc.Settings, because using scala.tools.nsc.GenericRunnerSettings would bring in other options (-howtorun, -save and -execute) which don’t make sense in Spark. Any new Spark specific command line option could now be added to org.apache.spark.repl.SparkRunnerSettings class. Since the behavior of loading a script from the command line should be the same as loading it using the “:load” command inside the shell, the script should be loaded when the SparkContext is available, that’s why we had to move the call to ‘loadFiles(settings)’ _after_ the call to postInitialization(). This still doesn’t work if ‘isAsync = true’.
This commit is contained in:
commit
11891e68c3
|
@ -0,0 +1,37 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.spark.repl
|
||||
|
||||
import scala.tools.nsc.{Settings, CompilerCommand}
|
||||
import scala.Predef._
|
||||
|
||||
/**
 * Command class enabling Spark-specific command line options (provided by
 * [[org.apache.spark.repl.SparkRunnerSettings]]).
 *
 * Mirrors the standard compiler command, but plugs in Spark's own settings
 * object so Spark-only flags (such as `-i`) are recognized without pulling in
 * the generic-runner options (`-howtorun`, `-save`, `-execute`).
 */
class SparkCommandLine(args: List[String], override val settings: Settings)
    extends CompilerCommand(args, settings) {

  /** Parse `args` with a fresh [[SparkRunnerSettings]] reporting via `error`. */
  def this(args: List[String], error: String => Unit) =
    this(args, new SparkRunnerSettings(error))

  /** Parse `args`, printing any parse errors to the console. */
  def this(args: List[String]) =
    this(args, (str: String) => Console.println("Error: " + str))
}
|
|
@ -835,7 +835,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
|
|||
|
||||
// runs :load `file` on any files passed via -i
|
||||
def loadFiles(settings: Settings) = settings match {
|
||||
case settings: GenericRunnerSettings =>
|
||||
case settings: SparkRunnerSettings =>
|
||||
for (filename <- settings.loadfiles.value) {
|
||||
val cmd = ":load " + filename
|
||||
command(cmd)
|
||||
|
@ -902,7 +902,6 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
|
|||
addThunk(printWelcome())
|
||||
addThunk(initializeSpark())
|
||||
|
||||
loadFiles(settings)
|
||||
// it is broken on startup; go ahead and exit
|
||||
if (intp.reporter.hasErrors)
|
||||
return false
|
||||
|
@ -922,6 +921,8 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
|
|||
}
|
||||
// printWelcome()
|
||||
|
||||
loadFiles(settings)
|
||||
|
||||
try loop()
|
||||
catch AbstractOrMissingHandler()
|
||||
finally closeInterpreter()
|
||||
|
@ -957,7 +958,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
|
|||
|
||||
/** process command-line arguments and do as they request */
|
||||
def process(args: Array[String]): Boolean = {
|
||||
val command = new CommandLine(args.toList, echo)
|
||||
val command = new SparkCommandLine(args.toList, msg => echo(msg))
|
||||
def neededHelp(): String =
|
||||
(if (command.settings.help.value) command.usageMsg + "\n" else "") +
|
||||
(if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "")
|
||||
|
|
|
@ -0,0 +1,32 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.spark.repl
|
||||
|
||||
import scala.tools.nsc.Settings
|
||||
|
||||
/**
 * [[scala.tools.nsc.Settings]] implementation adding Spark-specific REPL
 * command line options.
 */
class SparkRunnerSettings(error: String => Unit) extends Settings(error) {

  // `-i <file>` may be given multiple times; each file is replayed with
  // `:load` once the interpreter (and SparkContext) is up.
  val loadfiles =
    MultiStringSetting("-i", "file", "load a file (assumes the code is given interactively)")
}
|
Loading…
Reference in a new issue