diff --git a/core/src/main/scala/spark/ui/SparkUI.scala b/core/src/main/scala/spark/ui/SparkUI.scala index e078c4a6b2486ad7946fdab26af29a4f185a4ffe..16bc053c2f75c4aa3c750b7082923bd7ffafeb6c 100644 --- a/core/src/main/scala/spark/ui/SparkUI.scala +++ b/core/src/main/scala/spark/ui/SparkUI.scala @@ -1,4 +1,5 @@ -/* * Licensed to the Apache Software Foundation (ASF) under one or more +/* + * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 diff --git a/core/src/main/scala/spark/ui/UIUtils.scala b/core/src/main/scala/spark/ui/UIUtils.scala index 6b45679f9d53c762a654d9720817356db96334b8..92eff8ee6a0ec437bea3aa308dc1b0bef3ed2086 100644 --- a/core/src/main/scala/spark/ui/UIUtils.scala +++ b/core/src/main/scala/spark/ui/UIUtils.scala @@ -48,14 +48,14 @@ private[spark] object UIUtils { /** Returns a spark page with correctly formatted headers */ def headerSparkPage(content: => Seq[Node], sc: SparkContext, title: String, page: Page.Value) : Seq[Node] = { - val storage = page match { - case Storage => <li class="active"><a href={storageStr}>Storage</a></li> - case _ => <li><a href={storageStr}>Storage</a></li> - } val jobs = page match { case Jobs => <li class="active"><a href={stagesStr}>Jobs</a></li> case _ => <li><a href={stagesStr}>Jobs</a></li> } + val storage = page match { + case Storage => <li class="active"><a href={storageStr}>Storage</a></li> + case _ => <li><a href={storageStr}>Storage</a></li> + } val environment = page match { case Environment => <li class="active"><a href={envStr}>Environment</a></li> case _ => <li><a href={envStr}>Environment</a></li> diff --git a/docs/running-on-yarn.md b/docs/running-on-yarn.md index 3f0d077f7128a5e4a001a712775bf9490ba84aba..2f0d71fc35ac481d4c97a361dbb2e984927f1d1f 100644 --- 
a/docs/running-on-yarn.md +++ b/docs/running-on-yarn.md @@ -49,7 +49,7 @@ Most of the configs are the same for Spark on YARN as other deploys. See the Con Environment variables: * `SPARK_YARN_USER_ENV`, to add environment variables to the Spark processes launched on YARN. This can be a comma separated list of environment variables. ie SPARK_YARN_USER_ENV="JAVA_HOME=/jdk64,FOO=bar" -Properties: +System Properties: * 'spark.yarn.applicationMaster.waitTries', property to set the number of times the ApplicationMaster waits for the spark master and then also the number of tries it waits for the Spark Context to be initialized. Default is 10. # Launching Spark on YARN