diff --git a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
index 587fda7a3c1da538a34481d54d3e536ca3b9fc47..d02b2a499455e3ca27c7103e388607f6539a9acd 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
@@ -174,7 +174,7 @@ abstract class AbstractCommandBuilder {
     // Add Spark jars to the classpath. For the testing case, we rely on the test code to set and
     // propagate the test classpath appropriately. For normal invocation, look for the jars
     // directory under SPARK_HOME.
-    String jarsDir = findJarsDir(!isTesting);
+    String jarsDir = findJarsDir(getSparkHome(), getScalaVersion(), !isTesting);
     if (jarsDir != null) {
       addToClassPath(cp, join(File.separator, jarsDir, "*"));
     }
@@ -311,27 +311,6 @@ abstract class AbstractCommandBuilder {
     return props;
   }
 
-  private String findJarsDir(boolean failIfNotFound) {
-    // TODO: change to the correct directory once the assembly build is changed.
-    String sparkHome = getSparkHome();
-    File libdir;
-    if (new File(sparkHome, "RELEASE").isFile()) {
-      libdir = new File(sparkHome, "lib");
-      checkState(!failIfNotFound || libdir.isDirectory(),
-        "Library directory '%s' does not exist.",
-        libdir.getAbsolutePath());
-    } else {
-      libdir = new File(sparkHome, String.format("assembly/target/scala-%s", getScalaVersion()));
-      if (!libdir.isDirectory()) {
-        checkState(!failIfNotFound,
-          "Library directory '%s' does not exist; make sure Spark is built.",
-          libdir.getAbsolutePath());
-        libdir = null;
-      }
-    }
-    return libdir != null ? libdir.getAbsolutePath() : null;
-  }
-
   private String getConfDir() {
     String confDir = getenv("SPARK_CONF_DIR");
     return confDir != null ? confDir : join(File.separator, getSparkHome(), "conf");
diff --git a/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java b/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
index 39fdf300e26cd009acde046fe6e4bac91d61bf9e..1e55aad5c918b356b4163cfb39252a7925ea61e7 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
@@ -349,4 +349,29 @@ class CommandBuilderUtils {
       return Integer.parseInt(version[1]);
     }
   }
+
+  /**
+   * Find the location of the Spark jars dir, depending on whether we're looking at a build
+   * or a distribution directory.
+   */
+  static String findJarsDir(String sparkHome, String scalaVersion, boolean failIfNotFound) {
+    // TODO: change to the correct directory once the assembly build is changed.
+    File libdir;
+    if (new File(sparkHome, "RELEASE").isFile()) {
+      libdir = new File(sparkHome, "lib");
+      checkState(!failIfNotFound || libdir.isDirectory(),
+        "Library directory '%s' does not exist.",
+        libdir.getAbsolutePath());
+    } else {
+      libdir = new File(sparkHome, String.format("assembly/target/scala-%s", scalaVersion));
+      if (!libdir.isDirectory()) {
+        checkState(!failIfNotFound,
+          "Library directory '%s' does not exist; make sure Spark is built.",
+          libdir.getAbsolutePath());
+        libdir = null;
+      }
+    }
+    return libdir != null ? libdir.getAbsolutePath() : null;
+  }
+
 }
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
index 6bbc8c2dfa19ac3fe87e13b93482ae8f01ce85da..7b29c1ae4d888ee29536bb16b7f6c149b364e850 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
@@ -468,12 +468,11 @@ private[spark] class Client(
         // No configuration, so fall back to uploading local jar files.
         logWarning(s"Neither ${SPARK_JARS.key} nor ${SPARK_ARCHIVE.key} is set, falling back " +
           "to uploading libraries under SPARK_HOME.")
-        val jarsDir = new File(sparkConf.getenv("SPARK_HOME"), "lib")
-        if (jarsDir.isDirectory()) {
-          jarsDir.listFiles().foreach { f =>
-            if (f.isFile() && f.getName().toLowerCase().endsWith(".jar")) {
-              distribute(f.getAbsolutePath(), targetDir = Some(LOCALIZED_LIB_DIR))
-            }
+        val jarsDir = new File(YarnCommandBuilderUtils.findJarsDir(
+          sparkConf.getenv("SPARK_HOME")))
+        jarsDir.listFiles().foreach { f =>
+          if (f.isFile() && f.getName().toLowerCase().endsWith(".jar")) {
+            distribute(f.getAbsolutePath(), targetDir = Some(LOCALIZED_LIB_DIR))
           }
         }
       }
diff --git a/yarn/src/main/scala/org/apache/spark/launcher/YarnCommandBuilderUtils.scala b/yarn/src/main/scala/org/apache/spark/launcher/YarnCommandBuilderUtils.scala
index 7d246bf40712182cd7e18d81b164b9fec53bfa3b..6c3556a2ee43e6886271d62ba46c496387d805e7 100644
--- a/yarn/src/main/scala/org/apache/spark/launcher/YarnCommandBuilderUtils.scala
+++ b/yarn/src/main/scala/org/apache/spark/launcher/YarnCommandBuilderUtils.scala
@@ -19,6 +19,7 @@ package org.apache.spark.launcher
 
 import scala.collection.JavaConverters._
 import scala.collection.mutable.ListBuffer
+import scala.util.Properties
 
 /**
  * Exposes methods from the launcher library that are used by the YARN backend.
@@ -29,6 +30,14 @@ private[spark] object YarnCommandBuilderUtils {
     CommandBuilderUtils.quoteForBatchScript(arg)
   }
 
+  def findJarsDir(sparkHome: String): String = {
+    val scalaVer = Properties.versionNumberString
+      .split("\\.")
+      .take(2)
+      .mkString(".")
+    CommandBuilderUtils.findJarsDir(sparkHome, scalaVer, true)
+  }
+
   /**
    * Adds the perm gen configuration to the list of java options if needed and not yet added.
    *
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
index 24472e006b8754df0b8b7e4551c4525b235ce6ad..e3613a93ed05f79669fb0ea730e928dd53ccea04 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.deploy.yarn
 
-import java.io.File
+import java.io.{File, FileOutputStream}
 import java.net.URI
 import java.util.Properties
 
@@ -274,6 +274,7 @@ class ClientSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll
     val jarsDir = new File(temp, "lib")
     assert(jarsDir.mkdir())
     val jar = TestUtils.createJarWithFiles(Map(), jarsDir)
+    new FileOutputStream(new File(temp, "RELEASE")).close()
 
     val sparkConf = new SparkConfWithEnv(Map("SPARK_HOME" -> temp.getAbsolutePath()))
     val client = createClient(sparkConf)
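
Reviewer sketch (not part of the patch): the lookup logic moved into CommandBuilderUtils.findJarsDir, condensed into a runnable Scala example. The object name FindJarsDirSketch and the Option return type are illustrative assumptions only; the real helper reports failure via checkState rather than returning None, and the Scala binary version is trimmed the same way YarnCommandBuilderUtils does it.

import java.io.File
import scala.util.Properties

object FindJarsDirSketch {
  // A "RELEASE" marker file at the top of SPARK_HOME selects the
  // distribution layout ("lib"); otherwise assume a source build and
  // look under "assembly/target/scala-<binary version>".
  def findJarsDir(sparkHome: String): Option[File] = {
    // Properties.versionNumberString is e.g. "2.11.8"; keep only "2.11",
    // mirroring the split/take(2)/mkString trimming in the patch.
    val scalaVer = Properties.versionNumberString.split("\\.").take(2).mkString(".")
    val libdir =
      if (new File(sparkHome, "RELEASE").isFile()) new File(sparkHome, "lib")
      else new File(sparkHome, s"assembly/target/scala-$scalaVer")
    if (libdir.isDirectory()) Some(libdir) else None
  }

  def main(args: Array[String]): Unit =
    println(findJarsDir(sys.env.getOrElse("SPARK_HOME", ".")))
}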