diff --git a/core/src/hadoop1/scala/spark/deploy/SparkHadoopUtil.scala b/core/src/hadoop1/scala/spark/deploy/SparkHadoopUtil.scala
index df55be12545e120101d8364d5305094ffa5367d4..9f040faac3b74f0479bb7f412f70db56373278fe 100644
--- a/core/src/hadoop1/scala/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/hadoop1/scala/spark/deploy/SparkHadoopUtil.scala
@@ -41,4 +41,8 @@ object SparkHadoopUtil {
 
   // add any user credentials to the job conf which are necessary for running on a secure Hadoop cluster
   def addCredentials(conf: JobConf) {}
+
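+  // Whether we are running on a YARN cluster; always false in this non-YARN build.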
+  def isYarnMode(): Boolean = { false }
+
 }
diff --git a/core/src/hadoop2/scala/spark/deploy/SparkHadoopUtil.scala b/core/src/hadoop2/scala/spark/deploy/SparkHadoopUtil.scala
index df55be12545e120101d8364d5305094ffa5367d4..9f040faac3b74f0479bb7f412f70db56373278fe 100644
--- a/core/src/hadoop2/scala/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/hadoop2/scala/spark/deploy/SparkHadoopUtil.scala
@@ -41,4 +41,8 @@ object SparkHadoopUtil {
 
   // add any user credentials to the job conf which are necessary for running on a secure Hadoop cluster
   def addCredentials(conf: JobConf) {}
+
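+  // Whether we are running on a YARN cluster; always false in this non-YARN build.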
+  def isYarnMode(): Boolean = { false }
+
 }
diff --git a/core/src/main/scala/spark/SparkContext.scala b/core/src/main/scala/spark/SparkContext.scala
index 957c541ecf60349e157cbfa62432ed78fb097b77..c01e315e357b3e0555cdb1c3218e8f0f44f77c23 100644
--- a/core/src/main/scala/spark/SparkContext.scala
+++ b/core/src/main/scala/spark/SparkContext.scala
@@ -102,7 +102,6 @@ class SparkContext(
   }
 
   private val isLocal = (master == "local" || master.startsWith("local["))
-  private val isYarn = (master == "yarn-standalone")
 
   // Create the Spark execution environment (cache, map output tracker, etc)
   private[spark] val env = SparkEnv.createFromSystemProperties(
@@ -579,17 +578,17 @@ class SparkContext(
       val uri = new URI(path)
       val key = uri.getScheme match {
         case null | "file" =>
-            if (!isYarn)
-                env.httpFileServer.addJar(new File(uri.getPath))
-            else
-                null
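+          // In YARN mode, warn and skip local jars instead of serving them from the driver's HTTP file server.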
+          if (SparkHadoopUtil.isYarnMode()) {
+            logWarning("local jar specified as parameter to addJar under Yarn mode")
+            return 
+          }
+          env.httpFileServer.addJar(new File(uri.getPath))
         case _ => path
       }
-
-      if (key != null) {
-        addedJars(key) = System.currentTimeMillis
-        logInfo("Added JAR " + path + " at " + key + " with timestamp " + addedJars(key))
-      }
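+      // Track the added jar and its timestamp; executors use this map to fetch new jars.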
+      addedJars(key) = System.currentTimeMillis
+      logInfo("Added JAR " + path + " at " + key + " with timestamp " + addedJars(key))
     }
   }