From b84769a1073fde76a1a7efad51bf73ac1ee6db2a Mon Sep 17 00:00:00 2001
From: Matei Zaharia <matei@eecs.berkeley.edu>
Date: Sat, 13 Nov 2010 17:18:05 -0800
Subject: [PATCH] Modified project structure to work with buildr
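
Move the Java, Scala, and native sources from src/{java,scala,native}
to src/main/{java,scala,native} and the tests from src/test to
src/test/scala, matching the Maven-style layout buildr expects. Add a
buildfile that compiles against the jars in third_party/, packages a
jar, and runs the ScalaTest suites in a forked JVM, and point the run
and alltests scripts at buildr's target/ output directories instead of
build/classes. SparkCompletion and ReplSuite need small source changes
to build and pass under the new setup.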

---
 .gitignore                                    |  2 ++
 alltests                                      |  2 +-
 buildfile                                     | 22 +++++++++++++++++++
 run                                           |  4 ++--
 .../java/spark/compress/lzf/LZF.java          |  0
 .../spark/compress/lzf/LZFInputStream.java    |  0
 .../spark/compress/lzf/LZFOutputStream.java   |  0
 src/{ => main}/native/Makefile                |  0
 .../native/spark_compress_lzf_LZF.c           |  0
 src/{ => main}/scala/spark/Accumulators.scala |  0
 .../scala/spark/BoundedMemoryCache.scala      |  0
 src/{ => main}/scala/spark/Broadcast.scala    |  0
 src/{ => main}/scala/spark/Cache.scala        |  0
 .../scala/spark/ClosureCleaner.scala          |  0
 src/{ => main}/scala/spark/DfsShuffle.scala   |  0
 src/{ => main}/scala/spark/Executor.scala     |  0
 src/{ => main}/scala/spark/HadoopFile.scala   |  0
 src/{ => main}/scala/spark/HttpServer.scala   |  0
 src/{ => main}/scala/spark/Job.scala          |  0
 .../scala/spark/LocalFileShuffle.scala        |  0
 .../scala/spark/LocalScheduler.scala          |  0
 src/{ => main}/scala/spark/Logging.scala      |  0
 .../scala/spark/MesosScheduler.scala          |  0
 .../scala/spark/NumberedSplitRDD.scala        |  0
 .../scala/spark/ParallelArray.scala           |  0
 src/{ => main}/scala/spark/RDD.scala          |  0
 src/{ => main}/scala/spark/Scheduler.scala    |  0
 .../scala/spark/SerializableWritable.scala    |  0
 src/{ => main}/scala/spark/Shuffle.scala      |  0
 src/{ => main}/scala/spark/SimpleJob.scala    |  0
 .../scala/spark/SizeEstimator.scala           |  0
 .../scala/spark/SoftReferenceCache.scala      |  0
 src/{ => main}/scala/spark/SparkContext.scala |  0
 .../scala/spark/SparkException.scala          |  0
 src/{ => main}/scala/spark/Split.scala        |  0
 src/{ => main}/scala/spark/Task.scala         |  0
 src/{ => main}/scala/spark/TaskResult.scala   |  0
 src/{ => main}/scala/spark/Utils.scala        |  0
 .../scala/spark/WeakReferenceCache.scala      |  0
 .../spark/repl/ExecutorClassLoader.scala      |  0
 src/{ => main}/scala/spark/repl/Main.scala    |  0
 .../scala/spark/repl/SparkCompletion.scala    |  4 ++--
 .../spark/repl/SparkCompletionOutput.scala    |  0
 .../spark/repl/SparkInteractiveReader.scala   |  0
 .../scala/spark/repl/SparkInterpreter.scala   |  0
 .../spark/repl/SparkInterpreterLoop.scala     |  3 ++-
 .../spark/repl/SparkInterpreterSettings.scala |  0
 .../scala/spark/repl/SparkJLineReader.scala   |  0
 .../scala/spark/repl/SparkSimpleReader.scala  |  0
 .../spark/ParallelArraySplitSuite.scala       |  0
 src/test/{ => scala}/spark/ShuffleSuite.scala |  0
 .../{ => scala}/spark/repl/ReplSuite.scala    | 17 +++++++++++++-
 52 files changed, 47 insertions(+), 7 deletions(-)
 create mode 100644 buildfile
 rename src/{ => main}/java/spark/compress/lzf/LZF.java (100%)
 rename src/{ => main}/java/spark/compress/lzf/LZFInputStream.java (100%)
 rename src/{ => main}/java/spark/compress/lzf/LZFOutputStream.java (100%)
 rename src/{ => main}/native/Makefile (100%)
 rename src/{ => main}/native/spark_compress_lzf_LZF.c (100%)
 rename src/{ => main}/scala/spark/Accumulators.scala (100%)
 rename src/{ => main}/scala/spark/BoundedMemoryCache.scala (100%)
 rename src/{ => main}/scala/spark/Broadcast.scala (100%)
 rename src/{ => main}/scala/spark/Cache.scala (100%)
 rename src/{ => main}/scala/spark/ClosureCleaner.scala (100%)
 rename src/{ => main}/scala/spark/DfsShuffle.scala (100%)
 rename src/{ => main}/scala/spark/Executor.scala (100%)
 rename src/{ => main}/scala/spark/HadoopFile.scala (100%)
 rename src/{ => main}/scala/spark/HttpServer.scala (100%)
 rename src/{ => main}/scala/spark/Job.scala (100%)
 rename src/{ => main}/scala/spark/LocalFileShuffle.scala (100%)
 rename src/{ => main}/scala/spark/LocalScheduler.scala (100%)
 rename src/{ => main}/scala/spark/Logging.scala (100%)
 rename src/{ => main}/scala/spark/MesosScheduler.scala (100%)
 rename src/{ => main}/scala/spark/NumberedSplitRDD.scala (100%)
 rename src/{ => main}/scala/spark/ParallelArray.scala (100%)
 rename src/{ => main}/scala/spark/RDD.scala (100%)
 rename src/{ => main}/scala/spark/Scheduler.scala (100%)
 rename src/{ => main}/scala/spark/SerializableWritable.scala (100%)
 rename src/{ => main}/scala/spark/Shuffle.scala (100%)
 rename src/{ => main}/scala/spark/SimpleJob.scala (100%)
 rename src/{ => main}/scala/spark/SizeEstimator.scala (100%)
 rename src/{ => main}/scala/spark/SoftReferenceCache.scala (100%)
 rename src/{ => main}/scala/spark/SparkContext.scala (100%)
 rename src/{ => main}/scala/spark/SparkException.scala (100%)
 rename src/{ => main}/scala/spark/Split.scala (100%)
 rename src/{ => main}/scala/spark/Task.scala (100%)
 rename src/{ => main}/scala/spark/TaskResult.scala (100%)
 rename src/{ => main}/scala/spark/Utils.scala (100%)
 rename src/{ => main}/scala/spark/WeakReferenceCache.scala (100%)
 rename src/{ => main}/scala/spark/repl/ExecutorClassLoader.scala (100%)
 rename src/{ => main}/scala/spark/repl/Main.scala (100%)
 rename src/{ => main}/scala/spark/repl/SparkCompletion.scala (99%)
 rename src/{ => main}/scala/spark/repl/SparkCompletionOutput.scala (100%)
 rename src/{ => main}/scala/spark/repl/SparkInteractiveReader.scala (100%)
 rename src/{ => main}/scala/spark/repl/SparkInterpreter.scala (100%)
 rename src/{ => main}/scala/spark/repl/SparkInterpreterLoop.scala (99%)
 rename src/{ => main}/scala/spark/repl/SparkInterpreterSettings.scala (100%)
 rename src/{ => main}/scala/spark/repl/SparkJLineReader.scala (100%)
 rename src/{ => main}/scala/spark/repl/SparkSimpleReader.scala (100%)
 rename src/test/{ => scala}/spark/ParallelArraySplitSuite.scala (100%)
 rename src/test/{ => scala}/spark/ShuffleSuite.scala (100%)
 rename src/test/{ => scala}/spark/repl/ReplSuite.scala (89%)

diff --git a/.gitignore b/.gitignore
index 5abdec5d50..7c90cbfd72 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,3 +8,5 @@ third_party/libmesos.dylib
 conf/java-opts
 conf/spark-env.sh
 conf/log4j.properties
+target
+reports
diff --git a/alltests b/alltests
index cd11604855..50802d4578 100755
--- a/alltests
+++ b/alltests
@@ -8,4 +8,4 @@ if [ -d $RESULTS_DIR ]; then
   rm -r $RESULTS_DIR
 fi
 mkdir -p $RESULTS_DIR
-$FWDIR/run org.scalatest.tools.Runner -p $FWDIR/build/classes -u $RESULTS_DIR -o $@
+$FWDIR/run org.scalatest.tools.Runner -p $FWDIR/target/test/classes -u $RESULTS_DIR -o $@
diff --git a/buildfile b/buildfile
new file mode 100644
index 0000000000..aaec9dc1bf
--- /dev/null
+++ b/buildfile
@@ -0,0 +1,22 @@
+require 'buildr/scala'
+
+# Version number for this release
+VERSION_NUMBER = "0.0.0"
+# Group identifier for your projects
+GROUP = "spark"
+COPYRIGHT = ""
+
+# Specify Maven 2.0 remote repositories here, like this:
+repositories.remote << "http://www.ibiblio.org/maven2/"
+
+THIRD_PARTY_JARS = Dir["third_party/**/*.jar"]
+
+desc "The Spark project"
+define "spark" do
+  project.version = VERSION_NUMBER
+  project.group = GROUP
+  manifest["Implementation-Vendor"] = COPYRIGHT
+  compile.with THIRD_PARTY_JARS
+  package(:jar)
+  test.using :scalatest, :fork => true
+end
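
The buildfile above is buildr's stock project skeleton: require
'buildr/scala' brings in Scala compilation and ScalaTest support, the
compile step adds every jar found under third_party/ to the classpath,
package(:jar) emits a jar under target/, and test.using :scalatest,
:fork => true runs the suites in a forked JVM. With this in place, the
usual entry points should be "buildr compile", "buildr test", and
"buildr package", assuming a stock buildr installation.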
diff --git a/run b/run
index d6f7d920c5..b3748e78ab 100755
--- a/run
+++ b/run
@@ -33,8 +33,8 @@ if [ -e $FWDIR/conf/java-opts ] ; then
 fi
 export JAVA_OPTS
 
-# Build up classpath
-CLASSPATH="$SPARK_CLASSPATH:$FWDIR/build/classes:$MESOS_CLASSPATH"
+# build up classpath
+CLASSPATH="$SPARK_CLASSPATH:$FWDIR/target/classes:$FWDIR/target/test/classes:$MESOS_CLASSPATH"
 CLASSPATH+=:$FWDIR/conf
 CLASSPATH+=:$FWDIR/third_party/mesos.jar
 CLASSPATH+=:$FWDIR/third_party/asm-3.2/lib/all/asm-all-3.2.jar
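
Together with the alltests change above, this tracks buildr's default
output layout: main classes are compiled to target/classes and test
classes to target/test/classes, replacing the single build/classes
directory of the old build (target and reports, buildr's output and
test-report directories, are the matching .gitignore additions).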
diff --git a/src/java/spark/compress/lzf/LZF.java b/src/main/java/spark/compress/lzf/LZF.java
similarity index 100%
rename from src/java/spark/compress/lzf/LZF.java
rename to src/main/java/spark/compress/lzf/LZF.java
diff --git a/src/java/spark/compress/lzf/LZFInputStream.java b/src/main/java/spark/compress/lzf/LZFInputStream.java
similarity index 100%
rename from src/java/spark/compress/lzf/LZFInputStream.java
rename to src/main/java/spark/compress/lzf/LZFInputStream.java
diff --git a/src/java/spark/compress/lzf/LZFOutputStream.java b/src/main/java/spark/compress/lzf/LZFOutputStream.java
similarity index 100%
rename from src/java/spark/compress/lzf/LZFOutputStream.java
rename to src/main/java/spark/compress/lzf/LZFOutputStream.java
diff --git a/src/native/Makefile b/src/main/native/Makefile
similarity index 100%
rename from src/native/Makefile
rename to src/main/native/Makefile
diff --git a/src/native/spark_compress_lzf_LZF.c b/src/main/native/spark_compress_lzf_LZF.c
similarity index 100%
rename from src/native/spark_compress_lzf_LZF.c
rename to src/main/native/spark_compress_lzf_LZF.c
diff --git a/src/scala/spark/Accumulators.scala b/src/main/scala/spark/Accumulators.scala
similarity index 100%
rename from src/scala/spark/Accumulators.scala
rename to src/main/scala/spark/Accumulators.scala
diff --git a/src/scala/spark/BoundedMemoryCache.scala b/src/main/scala/spark/BoundedMemoryCache.scala
similarity index 100%
rename from src/scala/spark/BoundedMemoryCache.scala
rename to src/main/scala/spark/BoundedMemoryCache.scala
diff --git a/src/scala/spark/Broadcast.scala b/src/main/scala/spark/Broadcast.scala
similarity index 100%
rename from src/scala/spark/Broadcast.scala
rename to src/main/scala/spark/Broadcast.scala
diff --git a/src/scala/spark/Cache.scala b/src/main/scala/spark/Cache.scala
similarity index 100%
rename from src/scala/spark/Cache.scala
rename to src/main/scala/spark/Cache.scala
diff --git a/src/scala/spark/ClosureCleaner.scala b/src/main/scala/spark/ClosureCleaner.scala
similarity index 100%
rename from src/scala/spark/ClosureCleaner.scala
rename to src/main/scala/spark/ClosureCleaner.scala
diff --git a/src/scala/spark/DfsShuffle.scala b/src/main/scala/spark/DfsShuffle.scala
similarity index 100%
rename from src/scala/spark/DfsShuffle.scala
rename to src/main/scala/spark/DfsShuffle.scala
diff --git a/src/scala/spark/Executor.scala b/src/main/scala/spark/Executor.scala
similarity index 100%
rename from src/scala/spark/Executor.scala
rename to src/main/scala/spark/Executor.scala
diff --git a/src/scala/spark/HadoopFile.scala b/src/main/scala/spark/HadoopFile.scala
similarity index 100%
rename from src/scala/spark/HadoopFile.scala
rename to src/main/scala/spark/HadoopFile.scala
diff --git a/src/scala/spark/HttpServer.scala b/src/main/scala/spark/HttpServer.scala
similarity index 100%
rename from src/scala/spark/HttpServer.scala
rename to src/main/scala/spark/HttpServer.scala
diff --git a/src/scala/spark/Job.scala b/src/main/scala/spark/Job.scala
similarity index 100%
rename from src/scala/spark/Job.scala
rename to src/main/scala/spark/Job.scala
diff --git a/src/scala/spark/LocalFileShuffle.scala b/src/main/scala/spark/LocalFileShuffle.scala
similarity index 100%
rename from src/scala/spark/LocalFileShuffle.scala
rename to src/main/scala/spark/LocalFileShuffle.scala
diff --git a/src/scala/spark/LocalScheduler.scala b/src/main/scala/spark/LocalScheduler.scala
similarity index 100%
rename from src/scala/spark/LocalScheduler.scala
rename to src/main/scala/spark/LocalScheduler.scala
diff --git a/src/scala/spark/Logging.scala b/src/main/scala/spark/Logging.scala
similarity index 100%
rename from src/scala/spark/Logging.scala
rename to src/main/scala/spark/Logging.scala
diff --git a/src/scala/spark/MesosScheduler.scala b/src/main/scala/spark/MesosScheduler.scala
similarity index 100%
rename from src/scala/spark/MesosScheduler.scala
rename to src/main/scala/spark/MesosScheduler.scala
diff --git a/src/scala/spark/NumberedSplitRDD.scala b/src/main/scala/spark/NumberedSplitRDD.scala
similarity index 100%
rename from src/scala/spark/NumberedSplitRDD.scala
rename to src/main/scala/spark/NumberedSplitRDD.scala
diff --git a/src/scala/spark/ParallelArray.scala b/src/main/scala/spark/ParallelArray.scala
similarity index 100%
rename from src/scala/spark/ParallelArray.scala
rename to src/main/scala/spark/ParallelArray.scala
diff --git a/src/scala/spark/RDD.scala b/src/main/scala/spark/RDD.scala
similarity index 100%
rename from src/scala/spark/RDD.scala
rename to src/main/scala/spark/RDD.scala
diff --git a/src/scala/spark/Scheduler.scala b/src/main/scala/spark/Scheduler.scala
similarity index 100%
rename from src/scala/spark/Scheduler.scala
rename to src/main/scala/spark/Scheduler.scala
diff --git a/src/scala/spark/SerializableWritable.scala b/src/main/scala/spark/SerializableWritable.scala
similarity index 100%
rename from src/scala/spark/SerializableWritable.scala
rename to src/main/scala/spark/SerializableWritable.scala
diff --git a/src/scala/spark/Shuffle.scala b/src/main/scala/spark/Shuffle.scala
similarity index 100%
rename from src/scala/spark/Shuffle.scala
rename to src/main/scala/spark/Shuffle.scala
diff --git a/src/scala/spark/SimpleJob.scala b/src/main/scala/spark/SimpleJob.scala
similarity index 100%
rename from src/scala/spark/SimpleJob.scala
rename to src/main/scala/spark/SimpleJob.scala
diff --git a/src/scala/spark/SizeEstimator.scala b/src/main/scala/spark/SizeEstimator.scala
similarity index 100%
rename from src/scala/spark/SizeEstimator.scala
rename to src/main/scala/spark/SizeEstimator.scala
diff --git a/src/scala/spark/SoftReferenceCache.scala b/src/main/scala/spark/SoftReferenceCache.scala
similarity index 100%
rename from src/scala/spark/SoftReferenceCache.scala
rename to src/main/scala/spark/SoftReferenceCache.scala
diff --git a/src/scala/spark/SparkContext.scala b/src/main/scala/spark/SparkContext.scala
similarity index 100%
rename from src/scala/spark/SparkContext.scala
rename to src/main/scala/spark/SparkContext.scala
diff --git a/src/scala/spark/SparkException.scala b/src/main/scala/spark/SparkException.scala
similarity index 100%
rename from src/scala/spark/SparkException.scala
rename to src/main/scala/spark/SparkException.scala
diff --git a/src/scala/spark/Split.scala b/src/main/scala/spark/Split.scala
similarity index 100%
rename from src/scala/spark/Split.scala
rename to src/main/scala/spark/Split.scala
diff --git a/src/scala/spark/Task.scala b/src/main/scala/spark/Task.scala
similarity index 100%
rename from src/scala/spark/Task.scala
rename to src/main/scala/spark/Task.scala
diff --git a/src/scala/spark/TaskResult.scala b/src/main/scala/spark/TaskResult.scala
similarity index 100%
rename from src/scala/spark/TaskResult.scala
rename to src/main/scala/spark/TaskResult.scala
diff --git a/src/scala/spark/Utils.scala b/src/main/scala/spark/Utils.scala
similarity index 100%
rename from src/scala/spark/Utils.scala
rename to src/main/scala/spark/Utils.scala
diff --git a/src/scala/spark/WeakReferenceCache.scala b/src/main/scala/spark/WeakReferenceCache.scala
similarity index 100%
rename from src/scala/spark/WeakReferenceCache.scala
rename to src/main/scala/spark/WeakReferenceCache.scala
diff --git a/src/scala/spark/repl/ExecutorClassLoader.scala b/src/main/scala/spark/repl/ExecutorClassLoader.scala
similarity index 100%
rename from src/scala/spark/repl/ExecutorClassLoader.scala
rename to src/main/scala/spark/repl/ExecutorClassLoader.scala
diff --git a/src/scala/spark/repl/Main.scala b/src/main/scala/spark/repl/Main.scala
similarity index 100%
rename from src/scala/spark/repl/Main.scala
rename to src/main/scala/spark/repl/Main.scala
diff --git a/src/scala/spark/repl/SparkCompletion.scala b/src/main/scala/spark/repl/SparkCompletion.scala
similarity index 99%
rename from src/scala/spark/repl/SparkCompletion.scala
rename to src/main/scala/spark/repl/SparkCompletion.scala
index d67438445b..ac70db4dbe 100644
--- a/src/scala/spark/repl/SparkCompletion.scala
+++ b/src/main/scala/spark/repl/SparkCompletion.scala
@@ -310,7 +310,7 @@ class SparkCompletion(val repl: SparkInterpreter) extends SparkCompletionOutput
       else xs.reduceLeft(_ zip _ takeWhile (x => x._1 == x._2) map (_._1) mkString)
 
     // This is jline's entry point for completion.
-    override def complete(_buf: String, cursor: Int, candidates: JList[String]): Int = {
+    override def complete(_buf: String, cursor: Int, candidates: JList[_]): Int = {
       val buf = onull(_buf)
       verbosity = if (isConsecutiveTabs(buf, cursor)) verbosity + 1 else 0
       DBG("complete(%s, %d) last = (%s, %d), verbosity: %s".format(buf, cursor, lastBuf, lastCursor, verbosity))
@@ -321,7 +321,7 @@ class SparkCompletion(val repl: SparkInterpreter) extends SparkCompletionOutput
           case Nil  => None
           case xs   =>
             // modify in place and return the position
-            xs foreach (candidates add _)
+            xs.foreach(x => candidates.asInstanceOf[JList[AnyRef]].add(x))
 
             // update the last buffer unless this is an alternatives list
             if (xs contains "") Some(p.cursor)
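
The two hunks above adapt to jline's untyped completion API: the jline
on the classpath declares the candidates parameter as a raw
java.util.List, which Scala can only type as the existential JList[_],
and a JList[_] accepts no elements directly, hence the cast to
JList[AnyRef] before adding. A minimal standalone sketch of the same
trick (hypothetical names, not code from this patch):

    import java.util.{ArrayList, List => JList}

    object RawListDemo {
      // A raw java.util.List arrives in Scala as JList[_], whose add()
      // is unusable; casting to JList[AnyRef] restores an addable view,
      // exactly as SparkCompletion.complete does above.
      def fill(candidates: JList[_], xs: Seq[String]): Unit =
        xs.foreach(x => candidates.asInstanceOf[JList[AnyRef]].add(x))

      def main(args: Array[String]) {
        val cs = new ArrayList[AnyRef]
        fill(cs, Seq("spark", "sparkContext"))
        println(cs)  // [spark, sparkContext]
      }
    }
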
diff --git a/src/scala/spark/repl/SparkCompletionOutput.scala b/src/main/scala/spark/repl/SparkCompletionOutput.scala
similarity index 100%
rename from src/scala/spark/repl/SparkCompletionOutput.scala
rename to src/main/scala/spark/repl/SparkCompletionOutput.scala
diff --git a/src/scala/spark/repl/SparkInteractiveReader.scala b/src/main/scala/spark/repl/SparkInteractiveReader.scala
similarity index 100%
rename from src/scala/spark/repl/SparkInteractiveReader.scala
rename to src/main/scala/spark/repl/SparkInteractiveReader.scala
diff --git a/src/scala/spark/repl/SparkInterpreter.scala b/src/main/scala/spark/repl/SparkInterpreter.scala
similarity index 100%
rename from src/scala/spark/repl/SparkInterpreter.scala
rename to src/main/scala/spark/repl/SparkInterpreter.scala
diff --git a/src/scala/spark/repl/SparkInterpreterLoop.scala b/src/main/scala/spark/repl/SparkInterpreterLoop.scala
similarity index 99%
rename from src/scala/spark/repl/SparkInterpreterLoop.scala
rename to src/main/scala/spark/repl/SparkInterpreterLoop.scala
index 5bad0a37da..d4974009ce 100644
--- a/src/scala/spark/repl/SparkInterpreterLoop.scala
+++ b/src/main/scala/spark/repl/SparkInterpreterLoop.scala
@@ -129,7 +129,8 @@ extends InterpreterControl {
       settings.classpath append addedClasspath
       
     interpreter = new SparkInterpreter(settings, out) {
-      override protected def parentClassLoader = classOf[SparkInterpreterLoop].getClassLoader
+      override protected def parentClassLoader = 
+        classOf[SparkInterpreterLoop].getClassLoader
     }
     interpreter.setContextClassLoader()
     // interpreter.quietBind("settings", "spark.repl.SparkInterpreterSettings", interpreter.isettings)
diff --git a/src/scala/spark/repl/SparkInterpreterSettings.scala b/src/main/scala/spark/repl/SparkInterpreterSettings.scala
similarity index 100%
rename from src/scala/spark/repl/SparkInterpreterSettings.scala
rename to src/main/scala/spark/repl/SparkInterpreterSettings.scala
diff --git a/src/scala/spark/repl/SparkJLineReader.scala b/src/main/scala/spark/repl/SparkJLineReader.scala
similarity index 100%
rename from src/scala/spark/repl/SparkJLineReader.scala
rename to src/main/scala/spark/repl/SparkJLineReader.scala
diff --git a/src/scala/spark/repl/SparkSimpleReader.scala b/src/main/scala/spark/repl/SparkSimpleReader.scala
similarity index 100%
rename from src/scala/spark/repl/SparkSimpleReader.scala
rename to src/main/scala/spark/repl/SparkSimpleReader.scala
diff --git a/src/test/spark/ParallelArraySplitSuite.scala b/src/test/scala/spark/ParallelArraySplitSuite.scala
similarity index 100%
rename from src/test/spark/ParallelArraySplitSuite.scala
rename to src/test/scala/spark/ParallelArraySplitSuite.scala
diff --git a/src/test/spark/ShuffleSuite.scala b/src/test/scala/spark/ShuffleSuite.scala
similarity index 100%
rename from src/test/spark/ShuffleSuite.scala
rename to src/test/scala/spark/ShuffleSuite.scala
diff --git a/src/test/spark/repl/ReplSuite.scala b/src/test/scala/spark/repl/ReplSuite.scala
similarity index 89%
rename from src/test/spark/repl/ReplSuite.scala
rename to src/test/scala/spark/repl/ReplSuite.scala
index 8b38cde85f..225e766c71 100644
--- a/src/test/spark/repl/ReplSuite.scala
+++ b/src/test/scala/spark/repl/ReplSuite.scala
@@ -1,6 +1,10 @@
 package spark.repl
 
 import java.io._
+import java.net.URLClassLoader
+
+import scala.collection.mutable.ArrayBuffer
+import scala.collection.JavaConversions._
 
 import org.scalatest.FunSuite
 
@@ -8,9 +12,20 @@ class ReplSuite extends FunSuite {
   def runInterpreter(master: String, input: String): String = {
     val in = new BufferedReader(new StringReader(input + "\n"))
     val out = new StringWriter()
+    val cl = getClass.getClassLoader
+    var paths = new ArrayBuffer[String]
+    if (cl.isInstanceOf[URLClassLoader]) {
+      val urlLoader = cl.asInstanceOf[URLClassLoader]
+      for (url <- urlLoader.getURLs) {
+        if (url.getProtocol == "file") {
+          paths += url.getFile
+        }
+      }
+    }
     val interp = new SparkInterpreterLoop(in, new PrintWriter(out), master)
     spark.repl.Main.interp = interp
-    interp.main(new Array[String](0))
+    val separator = System.getProperty("path.separator")
+    interp.main(Array("-classpath", paths.mkString(separator)))
     spark.repl.Main.interp = null
     return out.toString
   }
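
The ReplSuite change above rebuilds a classpath for the child
interpreter from the URLClassLoader that loaded the test, presumably
because the buildfile runs tests in a forked JVM where the interpreter
would otherwise not find the freshly compiled classes. The same idea in
isolation (a sketch assuming the caller runs under a URLClassLoader;
the names here are made up):

    import java.net.URLClassLoader

    object ClasspathDemo {
      // Recover a classpath string from the loader of this class,
      // keeping only file: URLs and joining them with the platform
      // separator, as ReplSuite.runInterpreter does above.
      def currentClasspath(): String = getClass.getClassLoader match {
        case ucl: URLClassLoader =>
          ucl.getURLs.toSeq
             .filter(_.getProtocol == "file")
             .map(_.getFile)
             .mkString(System.getProperty("path.separator"))
        case _ => ""  // unknown loader: nothing we can recover
      }

      def main(args: Array[String]) {
        println(currentClasspath())
      }
    }
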
-- 
GitLab