diff --git a/core/src/main/scala/spark/deploy/JsonProtocol.scala b/core/src/main/scala/spark/deploy/JsonProtocol.scala
index f14f804b3ab4c10756b8771e2e8df651c7ec3210..732fa080645b0e7752f507f528d539e67b8f8ddf 100644
--- a/core/src/main/scala/spark/deploy/JsonProtocol.scala
+++ b/core/src/main/scala/spark/deploy/JsonProtocol.scala
@@ -1,6 +1,7 @@
 package spark.deploy
 
 import master.{JobInfo, WorkerInfo}
+import worker.ExecutorRunner
 import cc.spray.json._
 
 /**
@@ -30,6 +31,24 @@ private[spark] object JsonProtocol extends DefaultJsonProtocol {
       "submitdate" -> JsString(obj.submitDate.toString))
   }
 
+  implicit object JobDescriptionJsonFormat extends RootJsonWriter[JobDescription] {
+    def write(obj: JobDescription) = JsObject(
+      "name" -> JsString(obj.name),
+      "cores" -> JsNumber(obj.cores),
+      "memoryperslave" -> JsNumber(obj.memoryPerSlave),
+      "user" -> JsString(obj.user)
+    )
+  }
+
+  implicit object ExecutorRunnerJsonFormat extends RootJsonWriter[ExecutorRunner] {
+    def write(obj: ExecutorRunner) = JsObject(
+      "id" -> JsNumber(obj.execId),
+      "memory" -> JsNumber(obj.memory),
+      "jobid" -> JsString(obj.jobId),
+      "jobdesc" -> obj.jobDesc.toJson.asJsObject
+    )
+  }
+
   implicit object MasterStateJsonFormat extends RootJsonWriter[MasterState] {
     def write(obj: MasterState) = JsObject(
       "url" -> JsString("spark://" + obj.uri),
@@ -51,7 +70,9 @@ private[spark] object JsonProtocol extends DefaultJsonProtocol {
       "cores" -> JsNumber(obj.cores),
       "coresused" -> JsNumber(obj.coresUsed),
       "memory" -> JsNumber(obj.memory),
-      "memoryused" -> JsNumber(obj.memoryUsed)
+      "memoryused" -> JsNumber(obj.memoryUsed),
+      "executors" -> JsArray(obj.executors.toList.map(_.toJson)),
+      "finishedexecutors" -> JsArray(obj.finishedExecutors.toList.map(_.toJson))
     )
   }
 }
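
Note (not part of the patch): the two new writers make JobDescription and ExecutorRunner renderable through the same spray-json `toJson` mechanism the existing formats use, and the worker-state writer extended in the last hunk above relies on them for its "executors" and "finishedexecutors" arrays. A minimal usage sketch, assuming the patch is applied as-is; the `executorsToJson` helper and its `runners` parameter are illustrative names, not part of this change:

    import cc.spray.json._
    import spark.deploy.JsonProtocol._
    import spark.deploy.worker.ExecutorRunner

    // With the implicit writers from JsonProtocol in scope, a collection of
    // ExecutorRunner values can be serialized the same way the worker-state
    // writer does it in the hunk above.
    def executorsToJson(runners: Seq[ExecutorRunner]): JsArray =
      JsArray(runners.toList.map(_.toJson))
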
diff --git a/core/src/main/scala/spark/deploy/master/MasterWebUI.scala b/core/src/main/scala/spark/deploy/master/MasterWebUI.scala
index 580014ef3f495621c033a8656480da80caf7f6e8..458ee2d66589c1f910f9678ffb2eb8d6a2b23a27 100644
--- a/core/src/main/scala/spark/deploy/master/MasterWebUI.scala
+++ b/core/src/main/scala/spark/deploy/master/MasterWebUI.scala
@@ -38,20 +38,36 @@ class MasterWebUI(val actorSystem: ActorSystem, master: ActorRef) extends Direct
           }
       } ~
       path("job") {
-        parameter("jobId") { jobId =>
-          completeWith {
+        parameters("jobId", 'format ?) {
+          case (jobId, Some(js)) if (js.equalsIgnoreCase("json")) =>
             val future = master ? RequestMasterState
-            future.map { state => 
-              val masterState = state.asInstanceOf[MasterState]
-              
-              // A bit ugly an inefficient, but we won't have a number of jobs 
-              // so large that it will make a significant difference.
-              (masterState.activeJobs ++ masterState.completedJobs).find(_.id == jobId) match {
-                case Some(job) => spark.deploy.master.html.job_details.render(job)
-                case _ => null
+            val jobInfo = for (masterState <- future.mapTo[MasterState]) yield {
+              masterState.activeJobs.find(_.id == jobId) match {
+                case Some(job) => job
+                case _ => masterState.completedJobs.find(_.id == jobId) match {
+                  case Some(job) => job
+                  case _ => null
+                }
+              }
+            }
+            respondWithMediaType(MediaTypes.`application/json`) { ctx =>
+              ctx.complete(jobInfo.mapTo[JobInfo])
+            }
+          case (jobId, _) =>
+            completeWith {
+              val future = master ? RequestMasterState
+              future.map { state =>
+                val masterState = state.asInstanceOf[MasterState]
+
+                masterState.activeJobs.find(_.id == jobId) match {
+                  case Some(job) => spark.deploy.master.html.job_details.render(job)
+                  case _ => masterState.completedJobs.find(_.id == jobId) match {
+                    case Some(job) => spark.deploy.master.html.job_details.render(job)
+                    case _ => null
+                  }
+                }
               }
             }
-          }
         }
       } ~
       pathPrefix("static") {
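
Note (not part of the patch): the `job` route now takes an optional `format` query parameter. `format=json` answers with `application/json`, completing the request with the looked-up JobInfo (serialized via the existing JobInfo writer in JsonProtocol), while any other value, or no value at all, keeps the existing HTML rendering through `job_details`. Both branches resolve the jobId against active jobs first and completed jobs second; a minimal sketch of how that shared lookup could be factored out, where the `findJob` helper is hypothetical and not something this patch adds:

    import spark.deploy.MasterState
    import spark.deploy.master.JobInfo

    // Hypothetical helper, not in the patch: the active-then-completed job
    // lookup that both the JSON and HTML branches perform.
    def findJob(masterState: MasterState, jobId: String): Option[JobInfo] =
      masterState.activeJobs.find(_.id == jobId)
        .orElse(masterState.completedJobs.find(_.id == jobId))
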