diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index e3a8e83f3eaa5a8c312218736ff6824a3e5436df..df279b5a37c7d17496c276ee609eb868e8feac92 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -754,7 +754,8 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
 
   test("isDynamicAllocationEnabled") {
     val conf = new SparkConf()
-    conf.set("spark.master", "yarn-client")
+    conf.set("spark.master", "yarn")
+    conf.set("spark.submit.deployMode", "client")
     assert(Utils.isDynamicAllocationEnabled(conf) === false)
     assert(Utils.isDynamicAllocationEnabled(
       conf.set("spark.dynamicAllocation.enabled", "false")) === false)
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index aec0215b40945823a8c1c0c451a67ea7409aae48..7217a9907a48253e476d388aef1a68ac2664afa5 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -155,10 +155,6 @@ class SparkContext(object):
         self.appName = self._conf.get("spark.app.name")
         self.sparkHome = self._conf.get("spark.home", None)
 
-        # Let YARN know it's a pyspark app, so it distributes needed libraries.
-        if self.master == "yarn-client":
-            self._conf.set("spark.yarn.isPython", "true")
-
         for (k, v) in self._conf.getAll():
             if k.startswith("spark.executorEnv."):
                 varName = k[len("spark.executorEnv."):]
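
With the deploy mode no longer encoded in the master string, the removed check (self.master == "yarn-client") would never match a plain "yarn" master, so PySpark is the wrong place to set spark.yarn.isPython; the flag is instead set on the JVM submit path, where both the cluster manager and the application type are known. An illustrative Scala sketch, not the literal SparkSubmit code (the helper name and isPython parameter are hypothetical):

    import org.apache.spark.SparkConf

    // Hypothetical helper: mark a Python app for YARN based on the
    // master conf and the app type, instead of string-matching the
    // legacy "yarn-client" master inside the Python SparkContext.
    def markPythonAppForYarn(conf: SparkConf, isPython: Boolean): SparkConf = {
      if (isPython && conf.get("spark.master", "") == "yarn") {
        conf.set("spark.yarn.isPython", "true")
      }
      conf
    }
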
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 8fcab386ec8f9d83f20cd0ccd5130a87fe90c9bf..e8710041737044d4896acd9b0017d4d57b7be7e2 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -943,8 +943,6 @@ class SparkILoop(
       })
 
   private def process(settings: Settings): Boolean = savingContextLoader {
-    if (getMaster() == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true")
-
     this.settings = settings
     createInterpreter()
 
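
The removed guard only fired on the legacy "yarn-client" string, so once the master is plain "yarn" it would silently stop setting SPARK_YARN_MODE; dropping it moves that responsibility out of the REPL entirely. For illustration only, a master-based equivalent would look like this (hypothetical helper; the REPL no longer sets this property at all):

    // Hypothetical: match the plain "yarn" master, independent of
    // deploy mode, rather than the removed "yarn-client" string.
    def maybeEnableYarnMode(master: String): Unit = {
      if (master == "yarn") System.setProperty("SPARK_YARN_MODE", "true")
    }
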
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 4ce33e0e8565d144e50fe9fe08d586ae046add48..6b20dea5908a876545d9474245afbb29b6798f0b 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -312,7 +312,7 @@ private object YarnClusterDriver extends Logging with Matchers {
 
-    // If we are running in yarn-cluster mode, verify that driver logs links and present and are
-    // in the expected format.
-    if (conf.get("spark.master") == "yarn-cluster") {
+    // If we are running in cluster deploy mode, verify that the driver log links are present
+    // and in the expected format.
+    if (conf.get("spark.submit.deployMode") == "cluster") {
       assert(listener.driverLogs.nonEmpty)
       val driverLogs = listener.driverLogs.get
       assert(driverLogs.size === 2)
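
Here the test stops inferring cluster mode from a "yarn-cluster" master and reads the deploy mode directly. A small sketch of the same check factored out (helper name is illustrative; "client" is the documented default when the key is unset):

    import org.apache.spark.SparkConf

    // Cluster mode now means deployMode == "cluster"; the master is
    // just "yarn" either way.
    def isClusterMode(conf: SparkConf): Boolean =
      conf.get("spark.submit.deployMode", "client") == "cluster"
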