diff --git a/.gitignore b/.gitignore
index 5abdec5d50d8b68b20a6fbd05bdc1a952ac55370..7c90cbfd72dd7739404e9344d4f99e6a2ac694cb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,3 +8,5 @@ third_party/libmesos.dylib
 conf/java-opts
 conf/spark-env.sh
 conf/log4j.properties
+target
+reports
diff --git a/alltests b/alltests
index cd11604855b549d2ac054edf806a984a8a5ef252..50802d4578288f4d2a5c00e83ea20dc2aa974a36 100755
--- a/alltests
+++ b/alltests
@@ -8,4 +8,4 @@ if [ -d $RESULTS_DIR ]; then
   rm -r $RESULTS_DIR
 fi
 mkdir -p $RESULTS_DIR
-$FWDIR/run org.scalatest.tools.Runner -p $FWDIR/build/classes -u $RESULTS_DIR -o $@
+$FWDIR/run org.scalatest.tools.Runner -p $FWDIR/target/test/classes -u $RESULTS_DIR -o $@
diff --git a/buildfile b/buildfile
new file mode 100644
index 0000000000000000000000000000000000000000..aaec9dc1bf8ebe378572e81e27e5b883dcfac618
--- /dev/null
+++ b/buildfile
@@ -0,0 +1,22 @@
+require 'buildr/scala'
+
+# Version number for this release
+VERSION_NUMBER = "0.0.0"
+# Group identifier for your projects
+GROUP = "spark"
+COPYRIGHT = ""
+
+# Specify Maven 2.0 remote repositories here, like this:
+repositories.remote << "http://www.ibiblio.org/maven2/"
+
+THIRD_PARTY_JARS = Dir["third_party/**/*.jar"]
+
+desc "The Spark project"
+define "spark" do
+  project.version = VERSION_NUMBER
+  project.group = GROUP
+  manifest["Implementation-Vendor"] = COPYRIGHT
+  compile.with THIRD_PARTY_JARS
+  package(:jar)
+  test.using :scalatest, :fork => true
+end
diff --git a/run b/run
index d6f7d920c53b0e0802e06f93c5912f3697245c0d..b3748e78ab20d94919769985b06a6c9490fd4ff6 100755
--- a/run
+++ b/run
@@ -33,8 +33,8 @@ if [ -e $FWDIR/conf/java-opts ] ; then
 fi
 export JAVA_OPTS
 
-# Build up classpath
-CLASSPATH="$SPARK_CLASSPATH:$FWDIR/build/classes:$MESOS_CLASSPATH"
+# Build up classpath
+CLASSPATH="$SPARK_CLASSPATH:$FWDIR/target/classes:$FWDIR/target/test/classes:$MESOS_CLASSPATH"
 CLASSPATH+=:$FWDIR/conf
 CLASSPATH+=:$FWDIR/third_party/mesos.jar
 CLASSPATH+=:$FWDIR/third_party/asm-3.2/lib/all/asm-all-3.2.jar
diff --git a/src/java/spark/compress/lzf/LZF.java b/src/main/java/spark/compress/lzf/LZF.java
similarity index 100%
rename from src/java/spark/compress/lzf/LZF.java
rename to src/main/java/spark/compress/lzf/LZF.java
diff --git a/src/java/spark/compress/lzf/LZFInputStream.java b/src/main/java/spark/compress/lzf/LZFInputStream.java
similarity index 100%
rename from src/java/spark/compress/lzf/LZFInputStream.java
rename to src/main/java/spark/compress/lzf/LZFInputStream.java
diff --git a/src/java/spark/compress/lzf/LZFOutputStream.java b/src/main/java/spark/compress/lzf/LZFOutputStream.java
similarity index 100%
rename from src/java/spark/compress/lzf/LZFOutputStream.java
rename to src/main/java/spark/compress/lzf/LZFOutputStream.java
diff --git a/src/native/Makefile b/src/main/native/Makefile
similarity index 100%
rename from src/native/Makefile
rename to src/main/native/Makefile
diff --git a/src/native/spark_compress_lzf_LZF.c b/src/main/native/spark_compress_lzf_LZF.c
similarity index 100%
rename from src/native/spark_compress_lzf_LZF.c
rename to src/main/native/spark_compress_lzf_LZF.c
diff --git a/src/scala/spark/Accumulators.scala b/src/main/scala/spark/Accumulators.scala
similarity index 100%
rename from src/scala/spark/Accumulators.scala
rename to src/main/scala/spark/Accumulators.scala
diff --git a/src/scala/spark/BoundedMemoryCache.scala b/src/main/scala/spark/BoundedMemoryCache.scala
similarity index 100%
rename from src/scala/spark/BoundedMemoryCache.scala
rename to src/main/scala/spark/BoundedMemoryCache.scala
diff --git a/src/scala/spark/Broadcast.scala b/src/main/scala/spark/Broadcast.scala
similarity index 100%
rename from src/scala/spark/Broadcast.scala
rename to src/main/scala/spark/Broadcast.scala
diff --git a/src/scala/spark/Cache.scala b/src/main/scala/spark/Cache.scala
similarity index 100%
rename from src/scala/spark/Cache.scala
rename to src/main/scala/spark/Cache.scala
diff --git a/src/scala/spark/ClosureCleaner.scala b/src/main/scala/spark/ClosureCleaner.scala
similarity index 100%
rename from src/scala/spark/ClosureCleaner.scala
rename to src/main/scala/spark/ClosureCleaner.scala
diff --git a/src/scala/spark/DfsShuffle.scala b/src/main/scala/spark/DfsShuffle.scala
similarity index 100%
rename from src/scala/spark/DfsShuffle.scala
rename to src/main/scala/spark/DfsShuffle.scala
diff --git a/src/scala/spark/Executor.scala b/src/main/scala/spark/Executor.scala
similarity index 100%
rename from src/scala/spark/Executor.scala
rename to src/main/scala/spark/Executor.scala
diff --git a/src/scala/spark/HadoopFile.scala b/src/main/scala/spark/HadoopFile.scala
similarity index 100%
rename from src/scala/spark/HadoopFile.scala
rename to src/main/scala/spark/HadoopFile.scala
diff --git a/src/scala/spark/HttpServer.scala b/src/main/scala/spark/HttpServer.scala
similarity index 100%
rename from src/scala/spark/HttpServer.scala
rename to src/main/scala/spark/HttpServer.scala
diff --git a/src/scala/spark/Job.scala b/src/main/scala/spark/Job.scala
similarity index 100%
rename from src/scala/spark/Job.scala
rename to src/main/scala/spark/Job.scala
diff --git a/src/scala/spark/LocalFileShuffle.scala b/src/main/scala/spark/LocalFileShuffle.scala
similarity index 100%
rename from src/scala/spark/LocalFileShuffle.scala
rename to src/main/scala/spark/LocalFileShuffle.scala
diff --git a/src/scala/spark/LocalScheduler.scala b/src/main/scala/spark/LocalScheduler.scala
similarity index 100%
rename from src/scala/spark/LocalScheduler.scala
rename to src/main/scala/spark/LocalScheduler.scala
diff --git a/src/scala/spark/Logging.scala b/src/main/scala/spark/Logging.scala
similarity index 100%
rename from src/scala/spark/Logging.scala
rename to src/main/scala/spark/Logging.scala
diff --git a/src/scala/spark/MesosScheduler.scala b/src/main/scala/spark/MesosScheduler.scala
similarity index 100%
rename from src/scala/spark/MesosScheduler.scala
rename to src/main/scala/spark/MesosScheduler.scala
diff --git a/src/scala/spark/NumberedSplitRDD.scala b/src/main/scala/spark/NumberedSplitRDD.scala
similarity index 100%
rename from src/scala/spark/NumberedSplitRDD.scala
rename to src/main/scala/spark/NumberedSplitRDD.scala
diff --git a/src/scala/spark/ParallelArray.scala b/src/main/scala/spark/ParallelArray.scala
similarity index 100%
rename from src/scala/spark/ParallelArray.scala
rename to src/main/scala/spark/ParallelArray.scala
diff --git a/src/scala/spark/RDD.scala b/src/main/scala/spark/RDD.scala
similarity index 100%
rename from src/scala/spark/RDD.scala
rename to src/main/scala/spark/RDD.scala
diff --git a/src/scala/spark/Scheduler.scala b/src/main/scala/spark/Scheduler.scala
similarity index 100%
rename from src/scala/spark/Scheduler.scala
rename to src/main/scala/spark/Scheduler.scala
diff --git a/src/scala/spark/SerializableWritable.scala b/src/main/scala/spark/SerializableWritable.scala
similarity index 100%
rename from src/scala/spark/SerializableWritable.scala
rename to src/main/scala/spark/SerializableWritable.scala
diff --git a/src/scala/spark/Shuffle.scala b/src/main/scala/spark/Shuffle.scala
similarity index 100%
rename from src/scala/spark/Shuffle.scala
rename to src/main/scala/spark/Shuffle.scala
diff --git a/src/scala/spark/SimpleJob.scala b/src/main/scala/spark/SimpleJob.scala
similarity index 100%
rename from src/scala/spark/SimpleJob.scala
rename to src/main/scala/spark/SimpleJob.scala
diff --git a/src/scala/spark/SizeEstimator.scala b/src/main/scala/spark/SizeEstimator.scala
similarity index 100%
rename from src/scala/spark/SizeEstimator.scala
rename to src/main/scala/spark/SizeEstimator.scala
diff --git a/src/scala/spark/SoftReferenceCache.scala b/src/main/scala/spark/SoftReferenceCache.scala
similarity index 100%
rename from src/scala/spark/SoftReferenceCache.scala
rename to src/main/scala/spark/SoftReferenceCache.scala
diff --git a/src/scala/spark/SparkContext.scala b/src/main/scala/spark/SparkContext.scala
similarity index 100%
rename from src/scala/spark/SparkContext.scala
rename to src/main/scala/spark/SparkContext.scala
diff --git a/src/scala/spark/SparkException.scala b/src/main/scala/spark/SparkException.scala
similarity index 100%
rename from src/scala/spark/SparkException.scala
rename to src/main/scala/spark/SparkException.scala
diff --git a/src/scala/spark/Split.scala b/src/main/scala/spark/Split.scala
similarity index 100%
rename from src/scala/spark/Split.scala
rename to src/main/scala/spark/Split.scala
diff --git a/src/scala/spark/Task.scala b/src/main/scala/spark/Task.scala
similarity index 100%
rename from src/scala/spark/Task.scala
rename to src/main/scala/spark/Task.scala
diff --git a/src/scala/spark/TaskResult.scala b/src/main/scala/spark/TaskResult.scala
similarity index 100%
rename from src/scala/spark/TaskResult.scala
rename to src/main/scala/spark/TaskResult.scala
diff --git a/src/scala/spark/Utils.scala b/src/main/scala/spark/Utils.scala
similarity index 100%
rename from src/scala/spark/Utils.scala
rename to src/main/scala/spark/Utils.scala
diff --git a/src/scala/spark/WeakReferenceCache.scala b/src/main/scala/spark/WeakReferenceCache.scala
similarity index 100%
rename from src/scala/spark/WeakReferenceCache.scala
rename to src/main/scala/spark/WeakReferenceCache.scala
diff --git a/src/scala/spark/repl/ExecutorClassLoader.scala b/src/main/scala/spark/repl/ExecutorClassLoader.scala
similarity index 100%
rename from src/scala/spark/repl/ExecutorClassLoader.scala
rename to src/main/scala/spark/repl/ExecutorClassLoader.scala
diff --git a/src/scala/spark/repl/Main.scala b/src/main/scala/spark/repl/Main.scala
similarity index 100%
rename from src/scala/spark/repl/Main.scala
rename to src/main/scala/spark/repl/Main.scala
diff --git a/src/scala/spark/repl/SparkCompletion.scala b/src/main/scala/spark/repl/SparkCompletion.scala
similarity index 99%
rename from src/scala/spark/repl/SparkCompletion.scala
rename to src/main/scala/spark/repl/SparkCompletion.scala
index d67438445b0f301719cf760271d9bc1fd03a09af..ac70db4dbea173d453c57148b1fc1f5385d6758f 100644
--- a/src/scala/spark/repl/SparkCompletion.scala
+++ b/src/main/scala/spark/repl/SparkCompletion.scala
@@ -310,7 +310,7 @@ class SparkCompletion(val repl: SparkInterpreter) extends SparkCompletionOutput
       else xs.reduceLeft(_ zip _ takeWhile (x => x._1 == x._2) map (_._1) mkString)
 
     // This is jline's entry point for completion.
-    override def complete(_buf: String, cursor: Int, candidates: JList[String]): Int = {
+    override def complete(_buf: String, cursor: Int, candidates: JList[_]): Int = {
       val buf = onull(_buf)
       verbosity = if (isConsecutiveTabs(buf, cursor)) verbosity + 1 else 0
       DBG("complete(%s, %d) last = (%s, %d), verbosity: %s".format(buf, cursor, lastBuf, lastCursor, verbosity))
@@ -321,7 +321,7 @@ class SparkCompletion(val repl: SparkInterpreter) extends SparkCompletionOutput
           case Nil  => None
           case xs   =>
             // modify in place and return the position
-            xs foreach (candidates add _)
+            xs.foreach(x => candidates.asInstanceOf[JList[AnyRef]].add(x))
 
             // update the last buffer unless this is an alternatives list
             if (xs contains "") Some(p.cursor)
diff --git a/src/scala/spark/repl/SparkCompletionOutput.scala b/src/main/scala/spark/repl/SparkCompletionOutput.scala
similarity index 100%
rename from src/scala/spark/repl/SparkCompletionOutput.scala
rename to src/main/scala/spark/repl/SparkCompletionOutput.scala
diff --git a/src/scala/spark/repl/SparkInteractiveReader.scala b/src/main/scala/spark/repl/SparkInteractiveReader.scala
similarity index 100%
rename from src/scala/spark/repl/SparkInteractiveReader.scala
rename to src/main/scala/spark/repl/SparkInteractiveReader.scala
diff --git a/src/scala/spark/repl/SparkInterpreter.scala b/src/main/scala/spark/repl/SparkInterpreter.scala
similarity index 100%
rename from src/scala/spark/repl/SparkInterpreter.scala
rename to src/main/scala/spark/repl/SparkInterpreter.scala
diff --git a/src/scala/spark/repl/SparkInterpreterLoop.scala b/src/main/scala/spark/repl/SparkInterpreterLoop.scala
similarity index 99%
rename from src/scala/spark/repl/SparkInterpreterLoop.scala
rename to src/main/scala/spark/repl/SparkInterpreterLoop.scala
index 5bad0a37daaee8c7ac87d66870f4fe166cd2e816..d4974009ce055afc5d682fe952f5c78f7055d61f 100644
--- a/src/scala/spark/repl/SparkInterpreterLoop.scala
+++ b/src/main/scala/spark/repl/SparkInterpreterLoop.scala
@@ -129,7 +129,8 @@ extends InterpreterControl {
       settings.classpath append addedClasspath
       
     interpreter = new SparkInterpreter(settings, out) {
-      override protected def parentClassLoader = classOf[SparkInterpreterLoop].getClassLoader
+      override protected def parentClassLoader = 
+        classOf[SparkInterpreterLoop].getClassLoader
     }
     interpreter.setContextClassLoader()
     // interpreter.quietBind("settings", "spark.repl.SparkInterpreterSettings", interpreter.isettings)
diff --git a/src/scala/spark/repl/SparkInterpreterSettings.scala b/src/main/scala/spark/repl/SparkInterpreterSettings.scala
similarity index 100%
rename from src/scala/spark/repl/SparkInterpreterSettings.scala
rename to src/main/scala/spark/repl/SparkInterpreterSettings.scala
diff --git a/src/scala/spark/repl/SparkJLineReader.scala b/src/main/scala/spark/repl/SparkJLineReader.scala
similarity index 100%
rename from src/scala/spark/repl/SparkJLineReader.scala
rename to src/main/scala/spark/repl/SparkJLineReader.scala
diff --git a/src/scala/spark/repl/SparkSimpleReader.scala b/src/main/scala/spark/repl/SparkSimpleReader.scala
similarity index 100%
rename from src/scala/spark/repl/SparkSimpleReader.scala
rename to src/main/scala/spark/repl/SparkSimpleReader.scala
diff --git a/src/test/spark/ParallelArraySplitSuite.scala b/src/test/scala/spark/ParallelArraySplitSuite.scala
similarity index 100%
rename from src/test/spark/ParallelArraySplitSuite.scala
rename to src/test/scala/spark/ParallelArraySplitSuite.scala
diff --git a/src/test/spark/ShuffleSuite.scala b/src/test/scala/spark/ShuffleSuite.scala
similarity index 100%
rename from src/test/spark/ShuffleSuite.scala
rename to src/test/scala/spark/ShuffleSuite.scala
diff --git a/src/test/spark/repl/ReplSuite.scala b/src/test/scala/spark/repl/ReplSuite.scala
similarity index 89%
rename from src/test/spark/repl/ReplSuite.scala
rename to src/test/scala/spark/repl/ReplSuite.scala
index 8b38cde85f8ba0371dad67efe6f3bd3742220a7c..225e766c7114494bb7b67c757145c87a7c83a39e 100644
--- a/src/test/spark/repl/ReplSuite.scala
+++ b/src/test/scala/spark/repl/ReplSuite.scala
@@ -1,6 +1,10 @@
 package spark.repl
 
 import java.io._
+import java.net.URLClassLoader
+
+import scala.collection.mutable.ArrayBuffer
+import scala.collection.JavaConversions._
 
 import org.scalatest.FunSuite
 
@@ -8,9 +12,20 @@ class ReplSuite extends FunSuite {
   def runInterpreter(master: String, input: String): String = {
     val in = new BufferedReader(new StringReader(input + "\n"))
     val out = new StringWriter()
+    val cl = getClass.getClassLoader
+    var paths = new ArrayBuffer[String]
+    if (cl.isInstanceOf[URLClassLoader]) {
+      val urlLoader = cl.asInstanceOf[URLClassLoader]
+      for (url <- urlLoader.getURLs) {
+        if (url.getProtocol == "file") {
+          paths += url.getFile
+        }
+      }
+    }
     val interp = new SparkInterpreterLoop(in, new PrintWriter(out), master)
     spark.repl.Main.interp = interp
-    interp.main(new Array[String](0))
+    val separator = System.getProperty("path.separator")
+    interp.main(Array("-classpath", paths.mkString(separator)))
     spark.repl.Main.interp = null
     return out.toString
   }