From e9a4fe12d3dc4afeb715c7649df2c5527a2d5d35 Mon Sep 17 00:00:00 2001
From: Michael Armbrust <michael@databricks.com>
Date: Sat, 7 Feb 2015 00:14:38 -0800
Subject: [PATCH] [BUILD] Add the ability to launch spark-shell from SBT.

Now you can quickly launch spark-shell without building an assembly jar. For quick development iteration, run `build/sbt ~sparkShell`; typing `exit` in the shell will then recompile any changed sources and relaunch it.
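
For illustration, a session might look like this (the sample command and log lines below are approximate, not captured output; `~` is sbt's standard continuous-execution prefix, so the task re-runs once the shell exits):

    $ build/sbt
    > ~sparkShell
    ...
    scala> sc.parallelize(1 to 10).count()
    res0: Long = 10
    scala> exit
    [info] 1. Waiting for source changes... (press enter to interrupt)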

Author: Michael Armbrust <michael@databricks.com>

Closes #4438 from marmbrus/sparkShellSbt and squashes the following commits:

b4e44fe [Michael Armbrust] [BUILD] Add the ability to launch spark-shell from SBT.
---
 project/SparkBuild.scala | 23 +++++++++++++++++++++++
 1 file changed, 23 insertions(+)

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 5e3051c091..95f8dfa3d2 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -177,6 +177,29 @@ object SparkBuild extends PomBuild {
 
   enable(Flume.settings)(streamingFlumeSink)
 
+
+  /**
+   * Adds the ability to run the spark shell directly from SBT without building an assembly
+   * jar.
+   *
+   * Usage: `build/sbt sparkShell`
+   */
+  val sparkShell = taskKey[Unit]("start a spark-shell.")
+
+  enable(Seq(
+    connectInput in run := true,
+    fork := true,
+    outputStrategy in run := Some(StdoutOutput),
+
+    javaOptions ++= Seq("-Xmx2G", "-XX:MaxPermSize=1g"),
+
+    sparkShell := {
+      // The leading space is required: toTask appends this string verbatim as arguments.
+      (runMain in Compile).toTask(" org.apache.spark.repl.Main -usejavacp").value
+    }
+  ))(assembly)
+
+  enable(Seq(sparkShell := (sparkShell in LocalProject("assembly")).value))(spark)
+
   // TODO: move this to its upstream project.
   override def projectDefinitions(baseDirectory: File): Seq[Project] = {
     super.projectDefinitions(baseDirectory).map { x =>
-- 
GitLab