From 3292e2a71bfb5df5ba156cf7557747d164d12291 Mon Sep 17 00:00:00 2001
From: Bouke van der Bijl <boukevanderbijl@gmail.com>
Date: Mon, 5 May 2014 11:19:35 -0700
Subject: [PATCH] SPARK-1721: Reset the thread classLoader in the Mesos
 Executor

This is needed because Mesos appears to invoke the executor from a thread with a different environment; as a result, the Spark jar is missing from the thread's context classloader and classes cannot be loaded.

This fixes http://apache-spark-user-list.1001560.n3.nabble.com/java-lang-ClassNotFoundException-spark-on-mesos-td3510.html

I have no idea whether this is the right fix; I can only confirm that it fixes the issue for us.
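
For context, `Utils.getContextOrSparkClassLoader` prefers the thread's context classloader and otherwise falls back to the classloader that loaded Spark itself. A rough sketch of that fallback (not necessarily the exact implementation):

    // Prefer the thread's context classloader; if the thread has none (for
    // example because it was created by native code), fall back to the
    // classloader that loaded this class, which can see the Spark jar.
    def getContextOrSparkClassLoader: ClassLoader =
      Option(Thread.currentThread.getContextClassLoader)
        .getOrElse(getClass.getClassLoader)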

The `registered` method is called from Mesos (https://github.com/apache/mesos/blob/765ff9bc2ac5a12d4362f8235b572a37d646390a/src/java/jni/org_apache_mesos_MesosExecutorDriver.cpp).
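
A minimal, self-contained way to reproduce the failure mode and check the fallback (assuming the JNI-spawned callback thread simply has no context classloader set; `scala.Option` below is just an example class to resolve):

    object NullContextLoaderDemo {
      def main(args: Array[String]): Unit = {
        val t = new Thread(new Runnable {
          def run(): Unit = {
            // Simulate a thread created by native code: no context classloader.
            Thread.currentThread.setContextClassLoader(null)
            // Class.forName against the (null) context loader would consult the
            // bootstrap loader and fail; the fallback below still resolves
            // classes visible to the application classloader.
            val loader = Option(Thread.currentThread.getContextClassLoader)
              .getOrElse(getClass.getClassLoader)
            val cls = Class.forName("scala.Option", true, loader)
            println(s"Loaded ${cls.getName} via fallback loader")
          }
        })
        t.start()
        t.join()
      }
    }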

I am unsure which commit introduced this regression.

Author: Bouke van der Bijl <boukevanderbijl@gmail.com>

Closes #620 from bouk/mesos-classloader-fix and squashes the following commits:

c13eae0 [Bouke van der Bijl] Use getContextOrSparkClassLoader in SparkEnv and CompressionCodec
---
 core/src/main/scala/org/apache/spark/SparkEnv.scala           | 4 +---
 .../src/main/scala/org/apache/spark/io/CompressionCodec.scala | 3 ++-
 2 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala
index bea435ec34..d40ed27da5 100644
--- a/core/src/main/scala/org/apache/spark/SparkEnv.scala
+++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -156,13 +156,11 @@ object SparkEnv extends Logging {
       conf.set("spark.driver.port",  boundPort.toString)
     }
 
-    val classLoader = Thread.currentThread.getContextClassLoader
-
     // Create an instance of the class named by the given Java system property, or by
     // defaultClassName if the property is not set, and return it as a T
     def instantiateClass[T](propertyName: String, defaultClassName: String): T = {
       val name = conf.get(propertyName,  defaultClassName)
-      val cls = Class.forName(name, true, classLoader)
+      val cls = Class.forName(name, true, Utils.getContextOrSparkClassLoader)
       // First try with the constructor that takes SparkConf. If we can't find one,
       // use a no-arg constructor instead.
       try {
diff --git a/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala b/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
index e1a5ee316b..4b0fe1ab82 100644
--- a/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
+++ b/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
@@ -24,6 +24,7 @@ import org.xerial.snappy.{SnappyInputStream, SnappyOutputStream}
 
 import org.apache.spark.SparkConf
 import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.util.Utils
 
 /**
  * :: DeveloperApi ::
@@ -49,7 +50,7 @@ private[spark] object CompressionCodec {
   }
 
   def createCodec(conf: SparkConf, codecName: String): CompressionCodec = {
-    val ctor = Class.forName(codecName, true, Thread.currentThread.getContextClassLoader)
+    val ctor = Class.forName(codecName, true, Utils.getContextOrSparkClassLoader)
       .getConstructor(classOf[SparkConf])
     ctor.newInstance(conf).asInstanceOf[CompressionCodec]
   }
-- 
GitLab