From 1b7eab74e64f554bbf892c8ef7b7ec00b359d2c0 Mon Sep 17 00:00:00 2001
From: felixcheung <felixcheung_m@hotmail.com>
Date: Sat, 23 Apr 2016 18:20:31 -0700
Subject: [PATCH] [SPARK-12148][SPARKR] fix doc after renaming DataFrame to
 SparkDataFrame

## What changes were proposed in this pull request?

Fixed inadvertent roxygen2 doc changes and added the class name change to the programming guide
Follow-up to #12621

## How was this patch tested?

manually checked

Author: felixcheung <felixcheung_m@hotmail.com>

Closes #12647 from felixcheung/rdataframe.
---
 R/pkg/R/DataFrame.R | 16 ++++++++--------
 docs/sparkr.md      |  5 +++--
 2 files changed, 11 insertions(+), 10 deletions(-)

diff --git a/R/pkg/R/DataFrame.R b/R/pkg/R/DataFrame.R
index 69feec735c..3b2fd73375 100644
--- a/R/pkg/R/DataFrame.R
+++ b/R/pkg/R/DataFrame.R
@@ -25,7 +25,7 @@ setOldClass("jobj")
 #' @title S4 class that represents a SparkDataFrame
 #' @description DataFrames can be created using functions like \link{createDataFrame},
 #'              \link{read.json}, \link{table} etc.
-#' @family SparkSparkDataFrame functions
+#' @family SparkDataFrame functions
 #' @rdname SparkDataFrame
 #' @docType class
 #'
@@ -68,7 +68,7 @@ dataFrame <- function(sdf, isCached = FALSE) {
 #'
 #' @param x A SparkDataFrame
 #'
-#' @family SparkSparkDataFrame functions
+#' @family SparkDataFrame functions
 #' @rdname printSchema
 #' @name printSchema
 #' @export
@@ -93,7 +93,7 @@ setMethod("printSchema",
 #'
 #' @param x A SparkDataFrame
 #'
-#' @family SparkSparkDataFrame functions
+#' @family SparkDataFrame functions
 #' @rdname schema
 #' @name schema
 #' @export
@@ -117,7 +117,7 @@ setMethod("schema",
 #'
 #' @param x A SparkDataFrame
 #' @param extended Logical. If extended is False, explain() only prints the physical plan.
-#' @family SparkSparkDataFrame functions
+#' @family SparkDataFrame functions
 #' @rdname explain
 #' @name explain
 #' @export
@@ -148,7 +148,7 @@ setMethod("explain",
 #'
 #' @param x A SparkDataFrame
 #'
-#' @family SparkSparkDataFrame functions
+#' @family SparkDataFrame functions
 #' @rdname isLocal
 #' @name isLocal
 #' @export
@@ -173,7 +173,7 @@ setMethod("isLocal",
 #' @param x A SparkDataFrame
 #' @param numRows The number of rows to print. Defaults to 20.
 #'
-#' @family SparkSparkDataFrame functions
+#' @family SparkDataFrame functions
 #' @rdname showDF
 #' @name showDF
 #' @export
@@ -198,7 +198,7 @@ setMethod("showDF",
 #'
 #' @param x A SparkDataFrame
 #'
-#' @family SparkSparkDataFrame functions
+#' @family SparkDataFrame functions
 #' @rdname show
 #' @name show
 #' @export
@@ -225,7 +225,7 @@ setMethod("show", "SparkDataFrame",
 #'
 #' @param x A SparkDataFrame
 #'
-#' @family SparkSparkDataFrame functions
+#' @family SparkDataFrame functions
 #' @rdname dtypes
 #' @name dtypes
 #' @export
diff --git a/docs/sparkr.md b/docs/sparkr.md
index 73e38b8c70..a0b4f93776 100644
--- a/docs/sparkr.md
+++ b/docs/sparkr.md
@@ -384,10 +384,11 @@ You can inspect the search path in R with [`search()`](https://stat.ethz.ch/R-ma
 
 # Migration Guide
 
-## Upgrading From SparkR 1.5.x to 1.6
+## Upgrading From SparkR 1.5.x to 1.6.x
 
- - Before Spark 1.6, the default mode for writes was `append`. It was changed in Spark 1.6.0 to `error` to match the Scala API.
+ - Before Spark 1.6.0, the default mode for writes was `append`. It was changed in Spark 1.6.0 to `error` to match the Scala API.
 
 ## Upgrading From SparkR 1.6.x to 2.0
 
  - The method `table` has been removed and replaced by `tableToDF`.
+ - The class `DataFrame` has been renamed to `SparkDataFrame` to avoid name conflicts.
-- 
GitLab