From b293afc42c370da0ff308c16ba4af81289ea6f89 Mon Sep 17 00:00:00 2001
From: Marcelo Vanzin <vanzin@cloudera.com>
Date: Tue, 24 Mar 2015 13:48:33 +0000
Subject: [PATCH] [SPARK-6473] [core] Do not try to figure out Scala version if
 not needed...

....

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #5143 from vanzin/SPARK-6473 and squashes the following commits:

a2e5e2d [Marcelo Vanzin] [SPARK-6473] [core] Do not try to figure out Scala version if not needed.
---
 .../org/apache/spark/launcher/AbstractCommandBuilder.java | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
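
The change defers the getScalaVersion() lookup so it only runs on the code paths that actually need it: when SPARK_PREPEND_CLASSES or SPARK_TESTING puts per-project build output on the classpath, and when findAssembly() has to locate the assembly under assembly/target/scala-<version> because no RELEASE file is present. The sketch below shows that deferral pattern in isolation. It is a minimal, hypothetical illustration, not Spark's actual AbstractCommandBuilder: the class and helper names (LazyVersionExample, resolveScalaVersion) and the simplified buildClassPath() signature are assumptions made for the example.

// Minimal sketch of the deferral applied by this patch: the potentially
// expensive lookup is performed only on the branch that needs its result,
// instead of unconditionally at the top of the method.
import java.util.ArrayList;
import java.util.List;

public class LazyVersionExample {

  // Stand-in for getScalaVersion(): assume it inspects the environment or
  // the filesystem, so calling it when the result is unused is wasted work
  // and can even fail in layouts where the version cannot be detected.
  private static String resolveScalaVersion() {
    String fromEnv = System.getenv("SPARK_SCALA_VERSION");
    return (fromEnv != null && !fromEnv.isEmpty()) ? fromEnv : "2.10";
  }

  // Before the patch the version was resolved unconditionally; after the
  // patch it is resolved only inside the branch that builds a classpath
  // from per-project build output (SPARK_PREPEND_CLASSES / SPARK_TESTING).
  static List<String> buildClassPath(boolean prependClasses, String sparkHome) {
    List<String> cp = new ArrayList<String>();
    if (prependClasses) {
      String scala = resolveScalaVersion();  // deferred: only resolved here
      cp.add(String.format("%s/core/target/scala-%s/classes", sparkHome, scala));
    }
    cp.add(sparkHome + "/lib/spark-assembly.jar");
    return cp;
  }

  public static void main(String[] args) {
    // With the flag off, the Scala version is never resolved.
    System.out.println(buildClassPath(false, "/opt/spark"));
    // With the flag on, the version is resolved exactly where it is used.
    System.out.println(buildClassPath(true, "/opt/spark"));
  }
}

The same reasoning drives the findAssembly() signature change below: a distribution with a RELEASE file keeps the assembly in lib/, so the Scala version is only needed in the development layout, and the lookup now happens inside that else branch.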

diff --git a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
index dc90e9e987..2da5f72787 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
@@ -147,7 +147,6 @@ abstract class AbstractCommandBuilder {
    */
   List<String> buildClassPath(String appClassPath) throws IOException {
     String sparkHome = getSparkHome();
-    String scala = getScalaVersion();
 
     List<String> cp = new ArrayList<String>();
     addToClassPath(cp, getenv("SPARK_CLASSPATH"));
@@ -158,6 +157,7 @@ abstract class AbstractCommandBuilder {
     boolean prependClasses = !isEmpty(getenv("SPARK_PREPEND_CLASSES"));
     boolean isTesting = "1".equals(getenv("SPARK_TESTING"));
     if (prependClasses || isTesting) {
+      String scala = getScalaVersion();
       List<String> projects = Arrays.asList("core", "repl", "mllib", "bagel", "graphx",
         "streaming", "tools", "sql/catalyst", "sql/core", "sql/hive", "sql/hive-thriftserver",
         "yarn", "launcher");
@@ -182,7 +182,7 @@ abstract class AbstractCommandBuilder {
       addToClassPath(cp, String.format("%s/core/target/jars/*", sparkHome));
     }
 
-    String assembly = findAssembly(scala);
+    String assembly = findAssembly();
     addToClassPath(cp, assembly);
 
     // When Hive support is needed, Datanucleus jars must be included on the classpath. Datanucleus
@@ -330,7 +330,7 @@ abstract class AbstractCommandBuilder {
     return firstNonEmpty(childEnv.get(key), System.getenv(key));
   }
 
-  private String findAssembly(String scalaVersion) {
+  private String findAssembly() {
     String sparkHome = getSparkHome();
     File libdir;
     if (new File(sparkHome, "RELEASE").isFile()) {
@@ -338,7 +338,7 @@ abstract class AbstractCommandBuilder {
       checkState(libdir.isDirectory(), "Library directory '%s' does not exist.",
           libdir.getAbsolutePath());
     } else {
-      libdir = new File(sparkHome, String.format("assembly/target/scala-%s", scalaVersion));
+      libdir = new File(sparkHome, String.format("assembly/target/scala-%s", getScalaVersion()));
     }
 
     final Pattern re = Pattern.compile("spark-assembly.*hadoop.*\\.jar");
-- 
GitLab