From c0052d8d09eebadadb5ed35ac512caaf73919551 Mon Sep 17 00:00:00 2001
From: Jeff Zhang <zjffdu@apache.org>
Date: Wed, 9 Sep 2015 10:26:53 -0700
Subject: [PATCH] [SPARK-10481] [YARN] SPARK_PREPEND_CLASSES make spark-yarn related jar could n…
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Throw a more readable exception when the jar containing the Spark classes cannot be found (for example, when running with SPARK_PREPEND_CLASSES set), instead of failing with a bare NoSuchElementException from calling .head on an empty result. Please help review. Thanks
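
For context, here is a minimal self-contained sketch of the behavioral difference; SparkException and jarOfClass below are local stand-ins for the real Spark APIs, not the patched code itself:

    // Self-contained sketch: why getOrElse(throw ...) reads better than .head.
    // SparkException and jarOfClass are local stand-ins for the real Spark APIs.
    class SparkException(message: String) extends Exception(message)

    object JarLookupSketch {
      // Returns None to simulate running with SPARK_PREPEND_CLASSES: classes are
      // loaded from build output directories, so no containing jar can be located.
      def jarOfClass(clazz: Class[_]): Option[String] = None

      def main(args: Array[String]): Unit = {
        // Old behavior: .head on the empty result throws an opaque
        // java.util.NoSuchElementException with no hint about the cause.
        //   val jar = jarOfClass(getClass).head

        // New behavior: fail with a message naming the likely cause and the fix.
        val jar = jarOfClass(getClass).getOrElse(throw new SparkException(
          "Could not find jar containing Spark classes. The jar can be defined using the "
            + "spark.yarn.jar configuration option. If testing Spark, either set that option "
            + "or make sure SPARK_PREPEND_CLASSES is not set."))
        println(jar)
      }
    }

Throwing inside getOrElse works because a throw expression has type Nothing, which conforms to the expected String result: the option's value is returned when present, and the descriptive SparkException is raised otherwise.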

Author: Jeff Zhang <zjffdu@apache.org>

Closes #8649 from zjffdu/SPARK-10481.
---
 .../src/main/scala/org/apache/spark/deploy/yarn/Client.scala | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
index e9a02baafd..a2c4bc2f54 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
@@ -1045,7 +1045,10 @@ object Client extends Logging {
           s"in favor of the $CONF_SPARK_JAR configuration variable.")
       System.getenv(ENV_SPARK_JAR)
     } else {
-      SparkContext.jarOfClass(this.getClass).head
+      SparkContext.jarOfClass(this.getClass).getOrElse(throw new SparkException("Could not "
+        + "find jar containing Spark classes. The jar can be defined using the "
+        + "spark.yarn.jar configuration option. If testing Spark, either set that option or "
+        + "make sure SPARK_PREPEND_CLASSES is not set."))
     }
   }
 
-- 
GitLab