From aa6f83289b87f38481dbae60ad91d2ac78ccea46 Mon Sep 17 00:00:00 2001
From: Liang-Chi Hsieh <viirya@gmail.com>
Date: Fri, 19 Jul 2013 22:25:28 +0800
Subject: [PATCH] A better fix for giving local jars under Yarn mode.

---
 .../scala/spark/deploy/SparkHadoopUtil.scala    |  3 +++
 .../scala/spark/deploy/SparkHadoopUtil.scala    |  3 +++
 core/src/main/scala/spark/SparkContext.scala    | 17 +++++++----------
 3 files changed, 13 insertions(+), 10 deletions(-)

diff --git a/core/src/hadoop1/scala/spark/deploy/SparkHadoopUtil.scala b/core/src/hadoop1/scala/spark/deploy/SparkHadoopUtil.scala
index df55be1254..9f040faac3 100644
--- a/core/src/hadoop1/scala/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/hadoop1/scala/spark/deploy/SparkHadoopUtil.scala
@@ -41,4 +41,7 @@ object SparkHadoopUtil {
 
   // add any user credentials to the job conf which are necessary for running on a secure Hadoop cluster
   def addCredentials(conf: JobConf) {}
+
+  def isYarnMode(): Boolean = { false }
+
 }
diff --git a/core/src/hadoop2/scala/spark/deploy/SparkHadoopUtil.scala b/core/src/hadoop2/scala/spark/deploy/SparkHadoopUtil.scala
index df55be1254..9f040faac3 100644
--- a/core/src/hadoop2/scala/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/hadoop2/scala/spark/deploy/SparkHadoopUtil.scala
@@ -41,4 +41,7 @@ object SparkHadoopUtil {
 
   // add any user credentials to the job conf which are necessary for running on a secure Hadoop cluster
   def addCredentials(conf: JobConf) {}
+
+  def isYarnMode(): Boolean = { false }
+
 }
diff --git a/core/src/main/scala/spark/SparkContext.scala b/core/src/main/scala/spark/SparkContext.scala
index 957c541ecf..c01e315e35 100644
--- a/core/src/main/scala/spark/SparkContext.scala
+++ b/core/src/main/scala/spark/SparkContext.scala
@@ -102,7 +102,6 @@ class SparkContext(
   }
 
   private val isLocal = (master == "local" || master.startsWith("local["))
-  private val isYarn = (master == "yarn-standalone")
 
   // Create the Spark execution environment (cache, map output tracker, etc)
   private[spark] val env = SparkEnv.createFromSystemProperties(
@@ -579,17 +578,15 @@ class SparkContext(
       val uri = new URI(path)
       val key = uri.getScheme match {
         case null | "file" =>
-            if (!isYarn)
-                env.httpFileServer.addJar(new File(uri.getPath))
-            else
-                null
+          if (SparkHadoopUtil.isYarnMode()) {
+            logWarning("local jar specified as parameter to addJar under Yarn mode")
+            return
+          }
+          env.httpFileServer.addJar(new File(uri.getPath))
         case _ => path
       }
-
-      if (key != null) {
-        addedJars(key) = System.currentTimeMillis
-        logInfo("Added JAR " + path + " at " + key + " with timestamp " + addedJars(key))
-      }
+      addedJars(key) = System.currentTimeMillis
+      logInfo("Added JAR " + path + " at " + key + " with timestamp " + addedJars(key))
     }
   }
 
-- 
GitLab