From 480357cc6d71c682fe703611c71c1e6a36e6ce9a Mon Sep 17 00:00:00 2001
From: petermaxlee <petermaxlee@gmail.com>
Date: Wed, 6 Jul 2016 10:46:22 -0700
Subject: [PATCH] [SPARK-16304] LinkageError should not crash Spark executor

## What changes were proposed in this pull request?
This patch updates the failure handling logic so that the Spark executor does not crash when it encounters a LinkageError. `Utils.isFatalError` now treats `LinkageError` (e.g. `NoClassDefFoundError`) as non-fatal, so the error fails the task instead of killing the executor JVM.
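
For context, here is a minimal standalone sketch of the failure mode this patch addresses. The object name and the simulated error are illustrative only and are not part of the patch:

```scala
// Hypothetical standalone reproduction -- not part of this patch.
import org.apache.spark.{SparkContext, SparkException}

object LinkageErrorRepro {
  def main(args: Array[String]): Unit = {
    // local[1,2]: local mode, one worker thread, each task may fail up to 2 times.
    val sc = new SparkContext("local[1,2]", "linkage-error-repro")
    try {
      sc.parallelize(1 to 2).foreach { _ =>
        // In practice this would be a NoClassDefFoundError or similar,
        // raised by the JVM during class loading; thrown directly here.
        throw new LinkageError("simulated class loading failure")
      }
    } catch {
      case e: SparkException =>
        // With this patch the LinkageError fails the task (and, after
        // retries, the job) instead of killing the executor JVM.
        println(s"Job failed as expected: ${e.getMessage}")
    } finally {
      sc.stop()
    }
  }
}
```

With `local[1,2]` the task is retried once before the job fails, mirroring the new FailureSuite test in the diff below.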

## How was this patch tested?
Added an end-to-end test in FailureSuite that throws a LinkageError inside a task and verifies that the job fails with a SparkException rather than crashing the executor.

Author: petermaxlee <petermaxlee@gmail.com>

Closes #13982 from petermaxlee/SPARK-16304.
---
 core/src/main/scala/org/apache/spark/util/Utils.scala   | 6 +++++-
 core/src/test/scala/org/apache/spark/FailureSuite.scala | 9 +++++++++
 2 files changed, 14 insertions(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 156cf1748b..298e6243aa 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1881,7 +1881,11 @@ private[spark] object Utils extends Logging {
   /** Returns true if the given exception was fatal. See docs for scala.util.control.NonFatal. */
   def isFatalError(e: Throwable): Boolean = {
     e match {
-      case NonFatal(_) | _: InterruptedException | _: NotImplementedError | _: ControlThrowable =>
+      case NonFatal(_) |
+           _: InterruptedException |
+           _: NotImplementedError |
+           _: ControlThrowable |
+           _: LinkageError =>
         false
       case _ =>
         true
diff --git a/core/src/test/scala/org/apache/spark/FailureSuite.scala b/core/src/test/scala/org/apache/spark/FailureSuite.scala
index 132f6361e4..d805c67714 100644
--- a/core/src/test/scala/org/apache/spark/FailureSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FailureSuite.scala
@@ -253,6 +253,15 @@ class FailureSuite extends SparkFunSuite with LocalSparkContext {
     rdd.count()
   }
 
+  test("SPARK-16304: Link error should not crash executor") {
+    sc = new SparkContext("local[1,2]", "test")
+    intercept[SparkException] {
+      sc.parallelize(1 to 2).foreach { i =>
+        throw new LinkageError()
+      }
+    }
+  }
+
   // TODO: Need to add tests with shuffle fetch failures.
 }
 
-- 
GitLab