diff --git a/core/lib/mesos.jar b/core/lib/mesos.jar
index 921149edae6a78dd27e6413172328428d68a3aa0..731720e83ec063d00d6dd126555f3dc4aef138a8 100644
Binary files a/core/lib/mesos.jar and b/core/lib/mesos.jar differ
diff --git a/core/lib/protobuf-2.3.0.jar b/core/lib/protobuf-2.3.0.jar
deleted file mode 100644
index b3d405640792cf0a59c9eec722f49c5db101655d..0000000000000000000000000000000000000000
Binary files a/core/lib/protobuf-2.3.0.jar and /dev/null differ
diff --git a/core/src/main/scala/spark/MesosScheduler.scala b/core/src/main/scala/spark/MesosScheduler.scala
index 393a33af8ca0d9b6f165126e2d09b1931019acb8..5ed769c5c2d8bf37b4ecaf86ab8da228bdb22981 100644
--- a/core/src/main/scala/spark/MesosScheduler.scala
+++ b/core/src/main/scala/spark/MesosScheduler.scala
@@ -79,7 +79,13 @@ extends MScheduler with DAGScheduler with Logging
       override def run {
         val sched = MesosScheduler.this
         sched.driver = new MesosSchedulerDriver(sched, master)
-        sched.driver.run()
+        try {
+          val ret = sched.driver.run()
+          logInfo("driver.run() returned with code " + ret)
+        } catch {
+          case e: Exception =>
+            logError("driver.run() failed", e)
+        }
       }
     }.start
   }
diff --git a/core/src/main/scala/spark/SimpleJob.scala b/core/src/main/scala/spark/SimpleJob.scala
index 2961561f3404d11a03ead3d25b0e410419ca0770..6255ee93885ec5b54029034293e60fefc6c12c4a 100644
--- a/core/src/main/scala/spark/SimpleJob.scala
+++ b/core/src/main/scala/spark/SimpleJob.scala
@@ -204,7 +204,7 @@ extends Job(jobId) with Logging
     val index = tidToIndex(tid.getValue)
     if (!finished(index)) {
       tasksFinished += 1
-      logInfo("Finished TID %d (progress: %d/%d)".format(
+      logInfo("Finished TID %s (progress: %d/%d)".format(
         tid, tasksFinished, numTasks))
       // Deserialize task result
       val result = Utils.deserialize[TaskResult[_]](status.getData.toByteArray)
@@ -223,7 +223,7 @@ extends Job(jobId) with Logging
     val tid = status.getTaskId
     val index = tidToIndex(tid.getValue)
     if (!finished(index)) {
-      logInfo("Lost TID %d (task %d:%d)".format(tid, jobId, index))
+      logInfo("Lost TID %s (task %d:%d)".format(tid, jobId, index))
       launched(index) = false
       tasksLaunched -= 1
       // Check if the problem is a map output fetch failure. In that case, this
diff --git a/project/build/SparkProject.scala b/project/build/SparkProject.scala
index 54a82193411b1634f5dfdb52706a145529c18348..00409be281e7698503b9a69f8518d4ac84047bd2 100644
--- a/project/build/SparkProject.scala
+++ b/project/build/SparkProject.scala
@@ -41,6 +41,7 @@ class SparkProject(info: ProjectInfo) extends ParentProject(info) with IdeaProje
     val asm = "asm" % "asm-all" % "3.3.1"
     val scalaTest = "org.scalatest" % "scalatest" % "1.3" % "test"
     val scalaCheck = "org.scala-tools.testing" %% "scalacheck" % "1.7" % "test"
+    val protobuf = "com.google.protobuf" % "protobuf-java" % "2.3.0"
   }
 
   class ReplProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject with DepJar with XmlTestReport
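
The two SimpleJob hunks replace "%d" with "%s" when formatting a TID. status.getTaskId returns a Mesos TaskID protobuf message rather than an integer, and java.util.Formatter rejects non-integral arguments for "%d" with an IllegalFormatConversionException, so the old log statements would have thrown instead of logging. The sketch below is illustrative only and not part of the patch; TidFormatDemo and FakeTaskId are hypothetical names, with FakeTaskId standing in for org.apache.mesos.Protos.TaskID so the snippet runs without Mesos on the classpath.

// Illustrative sketch (not part of the patch): why "%s" works for a TaskID
// while "%d" throws. FakeTaskId is a hypothetical stand-in for
// org.apache.mesos.Protos.TaskID, which is a protobuf message, not a number.
object TidFormatDemo {
  class FakeTaskId(val value: String) {
    override def toString: String = "value: \"" + value + "\""
  }

  def main(args: Array[String]): Unit = {
    val tid = new FakeTaskId("42")

    // "%s" calls toString on any argument, so this succeeds:
    println("Finished TID %s (progress: %d/%d)".format(tid, 3, 10))

    // "%d" only accepts integral arguments, so this line throws
    // java.util.IllegalFormatConversionException:
    try {
      println("Finished TID %d (progress: %d/%d)".format(tid, 3, 10))
    } catch {
      case e: java.util.IllegalFormatConversionException =>
        println("Caught: " + e)
    }
  }
}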