diff --git a/core/src/main/scala/spark/Executor.scala b/core/src/main/scala/spark/Executor.scala
index e1256a229ebf6a375415a630d25323b8ca3ebe2a..31ba122baff85d30b258c1adfcd385c7cd3ec079 100644
--- a/core/src/main/scala/spark/Executor.scala
+++ b/core/src/main/scala/spark/Executor.scala
@@ -87,6 +87,15 @@ class Executor extends org.apache.mesos.Executor with Logging {
                              .build())
         }
         case t: Throwable => {
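+          // Serialize the failure reason and report it to the scheduler
+          // through a Mesos status update before shutting down.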
+          val reason = OtherFailure(t.toString)
+          d.sendStatusUpdate(TaskStatus.newBuilder()
+                             .setTaskId(desc.getTaskId)
+                             .setState(TaskState.TASK_FAILED)
+                             .setData(ByteString.copyFrom(Utils.serialize(reason)))
+                             .build())
+
           // TODO: Handle errors in tasks less dramatically
           logError("Exception in task ID " + tid, t)
           System.exit(1)
diff --git a/core/src/main/scala/spark/SimpleJob.scala b/core/src/main/scala/spark/SimpleJob.scala
index d982a75ba0fa210c9dc74946da0e9214efbe7de9..6a27f159c4e71cc7cec47f424c9de56cfd902612 100644
--- a/core/src/main/scala/spark/SimpleJob.scala
+++ b/core/src/main/scala/spark/SimpleJob.scala
@@ -229,6 +229,9 @@ extends Job(jobId) with Logging
             if (tasksFinished == numTasks)
               sched.jobFinished(this)
             return
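+          // Log the failure message that the executor reported back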
+          case otherFailure: OtherFailure =>
+            logInfo("Loss was due to %s".format(otherFailure.message))
           case _ => {}
         }
       }
diff --git a/examples/src/main/scala/spark/examples/ExceptionHandlingTest.scala b/examples/src/main/scala/spark/examples/ExceptionHandlingTest.scala
new file mode 100644
index 0000000000000000000000000000000000000000..46f658eab24201389b0c7a89f091493882d56334
--- /dev/null
+++ b/examples/src/main/scala/spark/examples/ExceptionHandlingTest.scala
@@ -0,0 +1,20 @@
+package spark.examples
+
+import spark.SparkContext
+
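+/** A trivial job whose tasks throw exceptions at random, for exercising task failure handling. */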
+object ExceptionHandlingTest {
+  def main(args: Array[String]) {
+    if (args.length == 0) {
+      System.err.println("Usage: ExceptionHandlingTest <host>")
+      System.exit(1)
+    }
+
+    val sc = new SparkContext(args(0), "ExceptionHandlingTest")
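+    // Throw from roughly a quarter of the tasks to exercise failure handling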
+    sc.parallelize(0 until sc.defaultParallelism).foreach { i =>
+      if (Math.random > 0.75)
+        throw new Exception("Testing exception handling")
+    }
+  }
+}