Commit 0b6db8c1 authored by Luca Rosellini

Added '-i' command line option to the Spark REPL.

We had to create new implementations of both scala.tools.nsc.CompilerCommand and scala.tools.nsc.Settings, because using scala.tools.nsc.GenericRunnerSettings would bring in other options (-howtorun, -save and -execute) that don't make sense in Spark.
Any new Spark-specific command line option can now be added to the org.apache.spark.repl.SparkRunnerSettings class.

Since loading a script from the command line should behave the same as loading it with the ":load" command inside the shell, the script must be loaded once the SparkContext is available. That is why we moved the call to 'loadFiles(settings)' _after_ the call to postInitialization(). This still doesn't work if 'isAsync = true'.
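
For illustration, a minimal sketch of the intended use; the script name and its contents are assumptions, not part of the commit. Because loadFiles now runs after postInitialization(), a script passed via '-i' can reference the sc value the REPL binds:

// init.scala (hypothetical), started with: ./bin/spark-shell -i init.scala
// Safe only because the script is loaded after the SparkContext is initialized.
val n = sc.parallelize(1 to 100).count()
println("count = " + n)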
parent 87248bdd
package org.apache.spark.repl

import scala.tools.nsc.{Settings, CompilerCommand}
import scala.Predef._

/**
 * Command class enabling Spark-specific command line options (provided by
 * <i>org.apache.spark.repl.SparkRunnerSettings</i>).
 *
 * @author Luca Rosellini <luca@stratio.com>
 */
class SparkCommandLine(args: List[String], override val settings: Settings)
    extends CompilerCommand(args, settings) {

  def this(args: List[String], error: String => Unit) {
    this(args, new SparkRunnerSettings(error))
  }

  def this(args: List[String]) {
    this(args, str => Console.println("Error: " + str))
  }
}
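
A rough usage sketch, not part of the commit: the single-argument constructor builds a SparkRunnerSettings internally, and CompilerCommand parses the arguments on construction, so the '-i' values can be read back afterwards (the pattern match mirrors the one in loadFiles below):

// Hypothetical: parse REPL arguments and inspect the files passed via -i.
val cmd = new SparkCommandLine(List("-i", "init.scala"))
cmd.settings match {
  case s: SparkRunnerSettings =>
    s.loadfiles.value.foreach(f => println("will run :load " + f))
  case _ => // plain Settings: no Spark-specific options available
}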
@@ -835,7 +835,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
   // runs :load `file` on any files passed via -i
   def loadFiles(settings: Settings) = settings match {
-    case settings: GenericRunnerSettings =>
+    case settings: SparkRunnerSettings =>
       for (filename <- settings.loadfiles.value) {
         val cmd = ":load " + filename
         command(cmd)
@@ -902,7 +902,6 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
     addThunk(printWelcome())
     addThunk(initializeSpark())
-    loadFiles(settings)
     // it is broken on startup; go ahead and exit
     if (intp.reporter.hasErrors)
       return false
@@ -922,6 +921,8 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
     }
     // printWelcome()
+    loadFiles(settings)
+
     try loop()
     catch AbstractOrMissingHandler()
     finally closeInterpreter()
@@ -955,7 +956,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
   /** process command-line arguments and do as they request */
   def process(args: Array[String]): Boolean = {
-    val command = new CommandLine(args.toList, echo)
+    val command = new SparkCommandLine(args.toList, msg => echo(msg))
     def neededHelp(): String =
       (if (command.settings.help.value) command.usageMsg + "\n" else "") +
       (if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "")
package org.apache.spark.repl

import scala.tools.nsc.Settings

/**
 * <i>scala.tools.nsc.Settings</i> implementation adding Spark-specific REPL
 * command line options.
 *
 * @author Luca Rosellini <luca@stratio.com>
 */
class SparkRunnerSettings(error: String => Unit) extends Settings(error) {

  val loadfiles = MultiStringSetting(
    "-i",
    "file",
    "load a file (assumes the code is given interactively)")
}
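
As noted in the commit message, further Spark-specific options would be declared in this class. A hedged sketch of what that could look like; the flag itself is hypothetical, and BooleanSetting is another settings factory inherited from scala.tools.nsc.Settings:

// Hypothetical addition inside SparkRunnerSettings, next to loadfiles:
val echoLoads = BooleanSetting(
  "-echo-loads",
  "print each script path before running :load on it")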