From 50248dcfff3ba79b73323f3a804c1e19a8be6097 Mon Sep 17 00:00:00 2001
From: bomeng <bmeng@us.ibm.com>
Date: Sun, 12 Jun 2016 14:25:48 +0100
Subject: [PATCH] [SPARK-15806][DOCUMENTATION] update doc for SPARK_MASTER_IP

## What changes were proposed in this pull request?

SPARK_MASTER_IP is a deprecated environment variable; it has been replaced by SPARK_MASTER_HOST (see MasterArguments.scala). This patch updates the spark-env.sh template, the standalone-mode docs, and the start-master.sh / start-slaves.sh scripts to refer to SPARK_MASTER_HOST, and makes MasterArguments log a deprecation warning while still honoring SPARK_MASTER_IP if it is set.
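
For reference, a minimal `conf/spark-env.sh` snippet using the new variable (the hostname below is only a placeholder):

```sh
# conf/spark-env.sh
# Preferred: bind the standalone master to a specific hostname or IP address.
export SPARK_MASTER_HOST=master.example.com   # placeholder hostname

# Deprecated: still honored, but MasterArguments now logs a warning if it is set.
# export SPARK_MASTER_IP=192.168.1.10
```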

## How was this patch tested?

Manually verified.
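
One way to exercise the change by hand (illustrative commands, not a transcript of the original verification; the warning appears in the master log under `logs/`):

```sh
# With the deprecated variable set, the master should start and log
# "SPARK_MASTER_IP is deprecated, please use SPARK_MASTER_HOST".
SPARK_MASTER_IP=$(hostname) ./sbin/start-master.sh
./sbin/stop-master.sh

# With the new variable, the master starts without the deprecation warning.
SPARK_MASTER_HOST=$(hostname) ./sbin/start-master.sh
```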

Author: bomeng <bmeng@us.ibm.com>

Closes #13543 from bomeng/SPARK-15806.
---
 conf/spark-env.sh.template                                | 2 +-
 .../org/apache/spark/deploy/master/MasterArguments.scala  | 8 +++++++-
 docs/spark-standalone.md                                  | 4 ++--
 sbin/start-master.sh                                      | 6 +++---
 sbin/start-slaves.sh                                      | 6 +++---
 5 files changed, 16 insertions(+), 10 deletions(-)

diff --git a/conf/spark-env.sh.template b/conf/spark-env.sh.template
index 9cffdc30c2..c750c72d19 100755
--- a/conf/spark-env.sh.template
+++ b/conf/spark-env.sh.template
@@ -42,7 +42,7 @@
 # - SPARK_DRIVER_MEMORY, Memory for Driver (e.g. 1000M, 2G) (Default: 1G)
 
 # Options for the daemons used in the standalone deploy mode
-# - SPARK_MASTER_IP, to bind the master to a different IP address or hostname
+# - SPARK_MASTER_HOST, to bind the master to a different IP address or hostname
 # - SPARK_MASTER_PORT / SPARK_MASTER_WEBUI_PORT, to use non-default ports for the master
 # - SPARK_MASTER_OPTS, to set config properties only for the master (e.g. "-Dx=y")
 # - SPARK_WORKER_CORES, to set the number of cores to use on this machine
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala b/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
index 585e0839d0..c63793c16d 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
@@ -20,18 +20,24 @@ package org.apache.spark.deploy.master
 import scala.annotation.tailrec
 
 import org.apache.spark.SparkConf
+import org.apache.spark.internal.Logging
 import org.apache.spark.util.{IntParam, Utils}
 
 /**
  * Command-line parser for the master.
  */
-private[master] class MasterArguments(args: Array[String], conf: SparkConf) {
+private[master] class MasterArguments(args: Array[String], conf: SparkConf) extends Logging {
   var host = Utils.localHostName()
   var port = 7077
   var webUiPort = 8080
   var propertiesFile: String = null
 
   // Check for settings in environment variables
+  if (System.getenv("SPARK_MASTER_IP") != null) {
+    logWarning("SPARK_MASTER_IP is deprecated, please use SPARK_MASTER_HOST")
+    host = System.getenv("SPARK_MASTER_IP")
+  }
+
   if (System.getenv("SPARK_MASTER_HOST") != null) {
     host = System.getenv("SPARK_MASTER_HOST")
   }
diff --git a/docs/spark-standalone.md b/docs/spark-standalone.md
index 40c72931cb..c864c90308 100644
--- a/docs/spark-standalone.md
+++ b/docs/spark-standalone.md
@@ -94,8 +94,8 @@ You can optionally configure the cluster further by setting environment variable
 <table class="table">
   <tr><th style="width:21%">Environment Variable</th><th>Meaning</th></tr>
   <tr>
-    <td><code>SPARK_MASTER_IP</code></td>
-    <td>Bind the master to a specific IP address, for example a public one.</td>
+    <td><code>SPARK_MASTER_HOST</code></td>
+    <td>Bind the master to a specific hostname or IP address, for example a public one.</td>
   </tr>
   <tr>
     <td><code>SPARK_MASTER_PORT</code></td>
diff --git a/sbin/start-master.sh b/sbin/start-master.sh
index ce7f177959..981cb15bc0 100755
--- a/sbin/start-master.sh
+++ b/sbin/start-master.sh
@@ -47,8 +47,8 @@ if [ "$SPARK_MASTER_PORT" = "" ]; then
   SPARK_MASTER_PORT=7077
 fi
 
-if [ "$SPARK_MASTER_IP" = "" ]; then
-  SPARK_MASTER_IP=`hostname`
+if [ "$SPARK_MASTER_HOST" = "" ]; then
+  SPARK_MASTER_HOST=`hostname`
 fi
 
 if [ "$SPARK_MASTER_WEBUI_PORT" = "" ]; then
@@ -56,5 +56,5 @@ if [ "$SPARK_MASTER_WEBUI_PORT" = "" ]; then
 fi
 
 "${SPARK_HOME}/sbin"/spark-daemon.sh start $CLASS 1 \
-  --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT \
+  --host $SPARK_MASTER_HOST --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT \
   $ORIGINAL_ARGS
diff --git a/sbin/start-slaves.sh b/sbin/start-slaves.sh
index 5bf2b83b42..0fa1605489 100755
--- a/sbin/start-slaves.sh
+++ b/sbin/start-slaves.sh
@@ -31,9 +31,9 @@ if [ "$SPARK_MASTER_PORT" = "" ]; then
   SPARK_MASTER_PORT=7077
 fi
 
-if [ "$SPARK_MASTER_IP" = "" ]; then
-  SPARK_MASTER_IP="`hostname`"
+if [ "$SPARK_MASTER_HOST" = "" ]; then
+  SPARK_MASTER_HOST="`hostname`"
 fi
 
 # Launch the slaves
-"${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/sbin/start-slave.sh" "spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT"
+"${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/sbin/start-slave.sh" "spark://$SPARK_MASTER_HOST:$SPARK_MASTER_PORT"
-- 
GitLab