From 22b982d2bc76197c85ed50558a0fc676dffcc5ef Mon Sep 17 00:00:00 2001
From: Patrick Wendell <pwendell@gmail.com>
Date: Sat, 7 Sep 2013 14:37:54 -0700
Subject: [PATCH] Rename docs/cdh-hdp.md to hadoop-third-party-distributions.md

Give the CDH/HDP page a distribution-neutral file name, update the link
in the global layout navigation, and remove the note about using `mr1`
versions of HADOOP_HOME when compiling.

---
 docs/_layouts/global.html                                | 2 +-
 docs/{cdh-hdp.md => hadoop-third-party-distributions.md} | 4 +---
 2 files changed, 2 insertions(+), 4 deletions(-)
 rename docs/{cdh-hdp.md => hadoop-third-party-distributions.md} (93%)

diff --git a/docs/_layouts/global.html b/docs/_layouts/global.html
index 3a3b8dce37..2f6bdcabe8 100755
--- a/docs/_layouts/global.html
+++ b/docs/_layouts/global.html
@@ -98,7 +98,7 @@
                             <ul class="dropdown-menu">
                                 <li><a href="configuration.html">Configuration</a></li>
                                 <li><a href="tuning.html">Tuning Guide</a></li>
-                                <li><a href="cdh-hdp.html">Running with CDH/HDP</a></li>
+                                <li><a href="hadoop-third-party-distributions.html">Running with CDH/HDP</a></li>
                                 <li><a href="hardware-provisioning.html">Hardware Provisioning</a></li>
                                 <li><a href="building-with-maven.html">Building Spark with Maven</a></li>
                                 <li><a href="contributing-to-spark.html">Contributing to Spark</a></li>
diff --git a/docs/cdh-hdp.md b/docs/hadoop-third-party-distributions.md
similarity index 93%
rename from docs/cdh-hdp.md
rename to docs/hadoop-third-party-distributions.md
index 679fb1100d..9f4f354525 100644
--- a/docs/cdh-hdp.md
+++ b/docs/hadoop-third-party-distributions.md
@@ -54,9 +54,7 @@ Spark can run in a variety of deployment modes:
   cores dedicated to Spark on each node.
 * Run Spark alongside Hadoop using a cluster resource manager, such as YARN or Mesos.
 
-These options are identical for those using CDH and HDP. Note that if you have a YARN cluster,
-but still prefer to run Spark on a dedicated set of nodes rather than scheduling through YARN, 
-use `mr1` versions of HADOOP_HOME when compiling.
+These options are identical for those using CDH and HDP.
 
 # Inheriting Cluster Configuration
 If you plan to read and write from HDFS using Spark, there are two Hadoop configuration files that
-- 
GitLab
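
As context for the "Inheriting Cluster Configuration" section of the renamed
page, below is a minimal, illustrative Scala sketch of the behavior that
section describes: when the Hadoop configuration directory (e.g. via
`HADOOP_CONF_DIR` or the classpath) supplies the cluster's `core-site.xml`
and `hdfs-site.xml`, Spark resolves `hdfs://` paths against the cluster's
NameNode. The master URL, application name, and input path are hypothetical
placeholders, and the API shown is the Spark 0.8-era Scala API.

```scala
// Minimal sketch, assuming HADOOP_CONF_DIR points at the cluster's
// core-site.xml and hdfs-site.xml. Master URL, app name, and input
// path are hypothetical.
import org.apache.spark.SparkContext

object HdfsLineCount {
  def main(args: Array[String]) {
    val sc = new SparkContext("spark://master:7077", "HdfsLineCount")
    // Because the Hadoop configuration is inherited from the cluster,
    // this unqualified hdfs:// URI resolves via the cluster's NameNode.
    val lines = sc.textFile("hdfs:///user/example/input.txt")
    println("lines: " + lines.count())
    sc.stop()
  }
}
```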