diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 4c34c888cfd5e15c091ac6d3da13d4ebeffbbd3e..06e561ae0d89bb5c1795b8593a8045619138aa80 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -274,6 +274,11 @@ object SparkBuild extends PomBuild {
    * Usage: `build/sbt sparkShell`
    */
   val sparkShell = taskKey[Unit]("start a spark-shell.")
+  val sparkPackage = inputKey[Unit](
+    s"""
+       |Download and run a spark package.
+       |Usage: `build/sbt "sparkPackage <group:artifact:version> <MainClass> [args]"`
+     """.stripMargin)
   val sparkSql = taskKey[Unit]("starts the spark sql CLI.")
 
   enable(Seq(
@@ -287,6 +292,16 @@ object SparkBuild extends PomBuild {
       (runMain in Compile).toTask(" org.apache.spark.repl.Main -usejavacp").value
     },
 
+    sparkPackage := {
+      import complete.DefaultParsers._
+      val packages :: className :: otherArgs = spaceDelimited("<group:artifact:version> <MainClass> [args]").parsed.toList
+      val scalaRun = (runner in run).value
+      val classpath = (fullClasspath in Runtime).value
+      val args = Seq("--packages", packages, "--class", className, (Keys.`package` in Compile in "core").value.getCanonicalPath) ++ otherArgs
+      println(args)
+      scalaRun.run("org.apache.spark.deploy.SparkSubmit", classpath.map(_.data), args, streams.value.log)
+    },
+
     javaOptions in Compile += "-Dspark.master=local",
 
     sparkSql := {