diff --git a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala
index 808bbe8c8f0b072b84566a6ae74c9cc617597f15..f62ae374663a7cb662dc569ee4805faac0b45472 100644
--- a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala
@@ -150,7 +150,7 @@ private[spark] class ExecutorsUI(val sc: SparkContext) {
       activeTasks += taskStart.taskInfo
     }
 
-    override def onTaskEnd(taskEnd: SparkListenerTaskEnd) = synchronized {
+    override def onTaskEnd(taskEnd: SparkListenerTaskEnd) = synchronized {
       val eid = taskEnd.taskInfo.executorId
       val activeTasks = executorToTasksActive.getOrElseUpdate(eid, new HashSet[TaskInfo]())
       val newDuration = executorToDuration.getOrElse(eid, 0L) + taskEnd.taskInfo.duration
@@ -168,20 +168,22 @@ private[spark] class ExecutorsUI(val sc: SparkContext) {
         }
 
       // update shuffle read/write
-      val shuffleRead = taskEnd.taskMetrics.shuffleReadMetrics
-      shuffleRead match {
-        case Some(s) =>
-          val newShuffleRead = executorToShuffleRead.getOrElse(eid, 0L) + s.remoteBytesRead
-          executorToShuffleRead.put(eid, newShuffleRead)
-        case _ => {}
-      }
-      val shuffleWrite = taskEnd.taskMetrics.shuffleWriteMetrics
-      shuffleWrite match {
-        case Some(s) => {
-          val newShuffleWrite = executorToShuffleWrite.getOrElse(eid, 0L) + s.shuffleBytesWritten
-          executorToShuffleWrite.put(eid, newShuffleWrite)
+      if (null != taskEnd.taskMetrics) {
+        val shuffleRead = taskEnd.taskMetrics.shuffleReadMetrics
+        shuffleRead match {
+          case Some(s) =>
+            val newShuffleRead = executorToShuffleRead.getOrElse(eid, 0L) + s.remoteBytesRead
+            executorToShuffleRead.put(eid, newShuffleRead)
+          case _ => {}
+        }
+        val shuffleWrite = taskEnd.taskMetrics.shuffleWriteMetrics
+        shuffleWrite match {
+          case Some(s) => {
+            val newShuffleWrite = executorToShuffleWrite.getOrElse(eid, 0L) + s.shuffleBytesWritten
+            executorToShuffleWrite.put(eid, newShuffleWrite)
+          }
+          case _ => {}
         }
-        case _ => {}
       }
     }
   }
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
index 8c92ff19a6788b938490592d4c0fe05350e33124..64ce7159938990ac10061189c2db53d36dc6a0fb 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
@@ -147,18 +147,20 @@ private[spark] class JobProgressListener(val sc: SparkContext) extends SparkList
         y.duration += taskEnd.taskInfo.duration
 
         // update shuffle read/write
-        val shuffleRead = taskEnd.taskMetrics.shuffleReadMetrics
-        shuffleRead match {
-          case Some(s) =>
-            y.shuffleRead += s.remoteBytesRead
-          case _ => {}
-        }
-        val shuffleWrite = taskEnd.taskMetrics.shuffleWriteMetrics
-        shuffleWrite match {
-          case Some(s) => {
-            y.shuffleWrite += s.shuffleBytesWritten
+        if (null != taskEnd.taskMetrics) {
+          val shuffleRead = taskEnd.taskMetrics.shuffleReadMetrics
+          shuffleRead match {
+            case Some(s) =>
+              y.shuffleRead += s.remoteBytesRead
+            case _ => {}
+          }
+          val shuffleWrite = taskEnd.taskMetrics.shuffleWriteMetrics
+          shuffleWrite match {
+            case Some(s) => {
+              y.shuffleWrite += s.shuffleBytesWritten
+            }
+            case _ => {}
           }
-          case _ => {}
         }
       }
       case _ => {}