diff --git a/core/src/main/scala/spark/deploy/JsonProtocol.scala b/core/src/main/scala/spark/deploy/JsonProtocol.scala
new file mode 100644
index 0000000000000000000000000000000000000000..f14f804b3ab4c10756b8771e2e8df651c7ec3210
--- /dev/null
+++ b/core/src/main/scala/spark/deploy/JsonProtocol.scala
@@ -0,0 +1,57 @@
+package spark.deploy
+
+import master.{JobInfo, WorkerInfo}
+import cc.spray.json._
+
+/**
+ * spray-json helper object containing implicit conversions to JSON, used for marshalling responses
+ */
+private[spark] object JsonProtocol extends DefaultJsonProtocol {
+  implicit object WorkerInfoJsonFormat extends RootJsonWriter[WorkerInfo] {
+    def write(obj: WorkerInfo) = JsObject(
+      "id" -> JsString(obj.id),
+      "host" -> JsString(obj.host),
+      "webuiaddress" -> JsString(obj.webUiAddress),
+      "cores" -> JsNumber(obj.cores),
+      "coresused" -> JsNumber(obj.coresUsed),
+      "memory" -> JsNumber(obj.memory),
+      "memoryused" -> JsNumber(obj.memoryUsed)
+    )
+  }
+
+  implicit object JobInfoJsonFormat extends RootJsonWriter[JobInfo] {
+    def write(obj: JobInfo) = JsObject(
+      "starttime" -> JsNumber(obj.startTime),
+      "id" -> JsString(obj.id),
+      "name" -> JsString(obj.desc.name),
+      "cores" -> JsNumber(obj.desc.cores),
+      "user" -> JsString(obj.desc.user),
+      "memoryperslave" -> JsNumber(obj.desc.memoryPerSlave),
+      "submitdate" -> JsString(obj.submitDate.toString))
+  }
+
+  implicit object MasterStateJsonFormat extends RootJsonWriter[MasterState] {
+    def write(obj: MasterState) = JsObject(
+      "url" -> JsString("spark://" + obj.uri),
+      "workers" -> JsArray(obj.workers.toList.map(_.toJson)),
+      "cores" -> JsNumber(obj.workers.map(_.cores).sum),
+      "coresused" -> JsNumber(obj.workers.map(_.coresUsed).sum),
+      "memory" -> JsNumber(obj.workers.map(_.memory).sum),
+      "memoryused" -> JsNumber(obj.workers.map(_.memoryUsed).sum),
+      "activejobs" -> JsArray(obj.activeJobs.toList.map(_.toJson)),
+      "completedjobs" -> JsArray(obj.completedJobs.toList.map(_.toJson))
+    )
+  }
+
+  implicit object WorkerStateJsonFormat extends RootJsonWriter[WorkerState] {
+    def write(obj: WorkerState) = JsObject(
+      "id" -> JsString(obj.workerId),
+      "masterurl" -> JsString(obj.masterUrl),
+      "masterwebuiurl" -> JsString(obj.masterWebUiUrl),
+      "cores" -> JsNumber(obj.cores),
+      "coresused" -> JsNumber(obj.coresUsed),
+      "memory" -> JsNumber(obj.memory),
+      "memoryused" -> JsNumber(obj.memoryUsed)
+    )
+  }
+}
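Note on usage: these RootJsonWriter instances feed spray-json's pimped toJson, so once JsonProtocol._ is imported any of the deploy-state values can be rendered as a JSON AST and printed. A minimal sketch, not code from this patch; the render helper, the WorkerState parameter, and the use of CompactPrinter (spray-json's compact printer) are illustrative assumptions:

    import cc.spray.json._
    import spark.deploy.JsonProtocol._

    // Illustrative only: `state` stands for a WorkerState answered by the worker actor.
    // WorkerStateJsonFormat above is picked up implicitly by toJson.
    def render(state: spark.deploy.WorkerState): String =
      CompactPrinter(state.toJson)  // JSON AST -> compact JSON string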
diff --git a/core/src/main/scala/spark/deploy/master/MasterWebUI.scala b/core/src/main/scala/spark/deploy/master/MasterWebUI.scala
index 3cdd3721f56cfb5cc6de88d51b9c48ffcc50315c..a96b55d6f32ca2f9773f5f74a94641afb689f1f8 100644
--- a/core/src/main/scala/spark/deploy/master/MasterWebUI.scala
+++ b/core/src/main/scala/spark/deploy/master/MasterWebUI.scala
@@ -9,6 +9,9 @@ import cc.spray.Directives
 import cc.spray.directives._
 import cc.spray.typeconversion.TwirlSupport._
 import spark.deploy._
+import cc.spray.http.MediaTypes
+import JsonProtocol._
+import cc.spray.typeconversion.SprayJsonSupport._
 
 private[spark]
 class MasterWebUI(val actorSystem: ActorSystem, master: ActorRef) extends Directives {
@@ -19,13 +22,19 @@ class MasterWebUI(val actorSystem: ActorSystem, master: ActorRef) extends Direct
   
   val handler = {
     get {
-      path("") {
-        completeWith {
+      (path("") & parameters('format ?)) {
+        case Some(js) if js.equalsIgnoreCase("json") =>
           val future = master ? RequestMasterState
-          future.map { 
-            masterState => spark.deploy.master.html.index.render(masterState.asInstanceOf[MasterState])
+          respondWithMediaType(MediaTypes.`application/json`) { ctx =>
+            ctx.complete(future.mapTo[MasterState])
+          }
+        case _ =>
+          completeWith {
+            val future = master ? RequestMasterState
+            future.map {
+              masterState => spark.deploy.master.html.index.render(masterState.asInstanceOf[MasterState])
+            }
           }
-        }
       } ~
       path("job") {
         parameter("jobId") { jobId =>
diff --git a/core/src/main/scala/spark/deploy/worker/WorkerWebUI.scala b/core/src/main/scala/spark/deploy/worker/WorkerWebUI.scala
index d06f4884ee63ca08dd764d78bddab51f7613a677..84b6c16bd6279694bdb389f08383139608d98786 100644
--- a/core/src/main/scala/spark/deploy/worker/WorkerWebUI.scala
+++ b/core/src/main/scala/spark/deploy/worker/WorkerWebUI.scala
@@ -7,7 +7,10 @@ import akka.util.Timeout
 import akka.util.duration._
 import cc.spray.Directives
 import cc.spray.typeconversion.TwirlSupport._
-import spark.deploy.{WorkerState, RequestWorkerState}
+import spark.deploy.{JsonProtocol, WorkerState, RequestWorkerState}
+import cc.spray.http.MediaTypes
+import JsonProtocol._
+import cc.spray.typeconversion.SprayJsonSupport._
 
 private[spark]
 class WorkerWebUI(val actorSystem: ActorSystem, worker: ActorRef) extends Directives {
@@ -18,13 +21,20 @@ class WorkerWebUI(val actorSystem: ActorSystem, worker: ActorRef) extends Direct
   
   val handler = {
     get {
-      path("") {
-        completeWith{
+      (path("") & parameters('format ?)) {
+        case Some(js) if js.equalsIgnoreCase("json") => {
           val future = worker ? RequestWorkerState
-          future.map { workerState =>
-            spark.deploy.worker.html.index(workerState.asInstanceOf[WorkerState])
+          respondWithMediaType(MediaTypes.`application/json`) { ctx =>
+            ctx.complete(future.mapTo[WorkerState])
           }
         }
+        case _ =>
+          completeWith {
+            val future = worker ? RequestWorkerState
+            future.map { workerState =>
+              spark.deploy.worker.html.index(workerState.asInstanceOf[WorkerState])
+            }
+          }
       } ~
       path("log") {
         parameters("jobId", "executorId", "logType") { (jobId, executorId, logType) =>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 518c4130f03b2544ae3e202329252bcec653e733..219674028eb7d2884986a706c121217f924a3869 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -134,6 +134,7 @@ object SparkBuild extends Build {
       "colt" % "colt" % "1.2.0",
       "cc.spray" % "spray-can" % "1.0-M2.1",
       "cc.spray" % "spray-server" % "1.0-M2.1",
+      "cc.spray" %%  "spray-json" % "1.1.1",
       "org.apache.mesos" % "mesos" % "0.9.0-incubating"
     ) ++ (if (HADOOP_MAJOR_VERSION == "2") Some("org.apache.hadoop" % "hadoop-client" % HADOOP_VERSION) else None).toSeq,
     unmanagedSourceDirectories in Compile <+= baseDirectory{ _ / ("src/hadoop" + HADOOP_MAJOR_VERSION + "/scala") }
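Build note: the %% operator tells sbt to append the project's Scala binary version to the artifact name, so the new line resolves to roughly cc.spray:spray-json_<scalaVersion>:1.1.1. Spelled out for illustration only (the 2.9.2 suffix is an assumption about this build's scalaVersion, not something stated in the patch):

    // Equivalent to the %% form above, with the cross-version suffix written explicitly.
    "cc.spray" % "spray-json_2.9.2" % "1.1.1"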