From f85aa06464a10f5d1563302fd76465dded475a12 Mon Sep 17 00:00:00 2001
From: Joshi <rekhajoshm@gmail.com>
Date: Fri, 2 Oct 2015 15:26:11 -0700
Subject: [PATCH] [SPARK-10317] [CORE] Compatibility between history server
 script and functionality

Compatibility between history server script and functionality

The history server has its argument parsing class in HistoryServerArguments. However, that class is not involved in the start-history-server.sh codepath, where the $1 arg is assigned to spark.history.fs.logDirectory and all other arguments are discarded (e.g. --properties-file).
This prevents the other options from being used via this script.
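
With this change the script forwards its arguments unchanged to HistoryServerArguments, so all of its options become usable from the script. A hypothetical invocation (the properties file path is only illustrative):

    # hypothetical properties file path, shown for illustration only
    sbin/start-history-server.sh --properties-file conf/history-server.properties

The bare directory argument and --dir/-d remain supported but deprecated in favor of setting spark.history.fs.logDirectory.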

Author: Joshi <rekhajoshm@gmail.com>
Author: Rekha Joshi <rekhajoshm@gmail.com>

Closes #8758 from rekhajoshm/SPARK-10317.
---
 .../history/HistoryServerArguments.scala      | 40 +++++++----
 .../history/HistoryServerArgumentsSuite.scala | 70 +++++++++++++++++++
 sbin/start-history-server.sh                  |  8 +--
 3 files changed, 96 insertions(+), 22 deletions(-)
 create mode 100644 core/src/test/scala/org/apache/spark/deploy/history/HistoryServerArgumentsSuite.scala

diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala
index 18265df9fa..d03bab3820 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala
@@ -30,28 +30,35 @@ private[history] class HistoryServerArguments(conf: SparkConf, args: Array[Strin
   parse(args.toList)
 
   private def parse(args: List[String]): Unit = {
-    args match {
-      case ("--dir" | "-d") :: value :: tail =>
-        logWarning("Setting log directory through the command line is deprecated as of " +
-          "Spark 1.1.0. Please set this through spark.history.fs.logDirectory instead.")
-        conf.set("spark.history.fs.logDirectory", value)
-        System.setProperty("spark.history.fs.logDirectory", value)
-        parse(tail)
+    if (args.length == 1) {
+      setLogDirectory(args.head)
+    } else {
+      args match {
+        case ("--dir" | "-d") :: value :: tail =>
+          setLogDirectory(value)
+          parse(tail)
 
-      case ("--help" | "-h") :: tail =>
-        printUsageAndExit(0)
+        case ("--help" | "-h") :: tail =>
+          printUsageAndExit(0)
 
-      case ("--properties-file") :: value :: tail =>
-        propertiesFile = value
-        parse(tail)
+        case ("--properties-file") :: value :: tail =>
+          propertiesFile = value
+          parse(tail)
 
-      case Nil =>
+        case Nil =>
 
-      case _ =>
-        printUsageAndExit(1)
+        case _ =>
+          printUsageAndExit(1)
+      }
     }
   }
 
+  private def setLogDirectory(value: String): Unit = {
+    logWarning("Setting log directory through the command line is deprecated as of " +
+      "Spark 1.1.0. Please set this through spark.history.fs.logDirectory instead.")
+    conf.set("spark.history.fs.logDirectory", value)
+  }
+
    // This mutates the SparkConf, so all accesses to it must be made after this line
    Utils.loadDefaultSparkProperties(conf, propertiesFile)
 
@@ -62,6 +69,8 @@ private[history] class HistoryServerArguments(conf: SparkConf, args: Array[Strin
       |Usage: HistoryServer [options]
       |
       |Options:
+      |  DIR                         Deprecated; set spark.history.fs.logDirectory directly.
+      |  --dir DIR (-d DIR)          Deprecated; set spark.history.fs.logDirectory directly.
       |  --properties-file FILE      Path to a custom Spark properties file.
       |                              Default is conf/spark-defaults.conf.
       |
@@ -90,3 +99,4 @@ private[history] class HistoryServerArguments(conf: SparkConf, args: Array[Strin
   }
 
 }
+
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerArgumentsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerArgumentsSuite.scala
new file mode 100644
index 0000000000..34f27ecaa0
--- /dev/null
+++ b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerArgumentsSuite.scala
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.deploy.history
+
+import java.io.File
+import java.nio.charset.StandardCharsets._
+
+import com.google.common.io.Files
+
+import org.apache.spark._
+import org.apache.spark.util.Utils
+
+class HistoryServerArgumentsSuite extends SparkFunSuite {
+
+  private val logDir = new File("src/test/resources/spark-events")
+  private val conf = new SparkConf()
+    .set("spark.history.fs.logDirectory", logDir.getAbsolutePath)
+    .set("spark.history.fs.updateInterval", "1")
+    .set("spark.testing", "true")
+
+  test("No Arguments Parsing") {
+    val argStrings = Array[String]()
+    val hsa = new HistoryServerArguments(conf, argStrings)
+    assert(conf.get("spark.history.fs.logDirectory") === logDir.getAbsolutePath)
+    assert(conf.get("spark.history.fs.updateInterval") === "1")
+    assert(conf.get("spark.testing") === "true")
+  }
+
+  test("Directory Arguments Parsing --dir or -d") {
+    val argStrings = Array("--dir", "src/test/resources/spark-events1")
+    val hsa = new HistoryServerArguments(conf, argStrings)
+    assert(conf.get("spark.history.fs.logDirectory") === "src/test/resources/spark-events1")
+  }
+
+  test("Directory Param can also be set directly") {
+    val argStrings = Array("src/test/resources/spark-events2")
+    val hsa = new HistoryServerArguments(conf, argStrings)
+    assert(conf.get("spark.history.fs.logDirectory") === "src/test/resources/spark-events2")
+  }
+
+  test("Properties File Arguments Parsing --properties-file") {
+    val tmpDir = Utils.createTempDir()
+    val outFile = File.createTempFile("test-load-spark-properties", "test", tmpDir)
+    try {
+      Files.write("spark.test.CustomPropertyA blah\n" +
+        "spark.test.CustomPropertyB notblah\n", outFile, UTF_8)
+      val argStrings = Array("--properties-file", outFile.getAbsolutePath)
+      val hsa = new HistoryServerArguments(conf, argStrings)
+      assert(conf.get("spark.test.CustomPropertyA") === "blah")
+      assert(conf.get("spark.test.CustomPropertyB") === "notblah")
+    } finally {
+      Utils.deleteRecursively(tmpDir)
+    }
+  }
+
+}
diff --git a/sbin/start-history-server.sh b/sbin/start-history-server.sh
index 7172ad15d8..9034e5715c 100755
--- a/sbin/start-history-server.sh
+++ b/sbin/start-history-server.sh
@@ -30,10 +30,4 @@ sbin="`cd "$sbin"; pwd`"
 . "$sbin/spark-config.sh"
 . "$SPARK_PREFIX/bin/load-spark-env.sh"
 
-if [ $# != 0 ]; then
-  echo "Using command line arguments for setting the log directory is deprecated. Please "
-  echo "set the spark.history.fs.logDirectory configuration option instead."
-  export SPARK_HISTORY_OPTS="$SPARK_HISTORY_OPTS -Dspark.history.fs.logDirectory=$1"
-fi
-
-exec "$sbin"/spark-daemon.sh start org.apache.spark.deploy.history.HistoryServer 1
+exec "$sbin"/spark-daemon.sh start org.apache.spark.deploy.history.HistoryServer 1 $@
-- 
GitLab