From d2fde6b72c4aede2e7edb4a7e6653fb1e7b19924 Mon Sep 17 00:00:00 2001
From: Junyang Qian <junyangq@databricks.com>
Date: Fri, 2 Sep 2016 21:11:57 -0700
Subject: [PATCH] [SPARKR][MINOR] Fix docs for sparkR.session and count

## What changes were proposed in this pull request?

This PR adds more explanation to `sparkR.session`. It also modifies the doc for `count` so that, when the methods are grouped into one doc page, the description doesn't confuse users. A hedged usage sketch is included below.
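
For quick reference, here is a minimal SparkR sketch of the two `count` usages and of passing extra properties through `sparkR.session`. The `faithful` dataset and the `spark.sql.shuffle.partitions` value are only illustrative choices, not part of this patch:

```r
library(SparkR)

# Get the existing SparkSession or create a new one; extra named
# parameters in ... are applied as Spark properties.
sparkR.session(master = "local[*]", appName = "countExample",
               spark.sql.shuffle.partitions = "4")

df <- createDataFrame(faithful)

# count() as a column aggregate function (Column input).
head(agg(df, count(df$waiting)))

# count() on GroupedData: one row per group, grouping column retained.
head(count(groupBy(df, "waiting")))

sparkR.session.stop()
```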

## How was this patch tested?

Manual test.

![screen shot 2016-09-02 at 1 21 36 pm](https://cloud.githubusercontent.com/assets/15318264/18217198/409613ac-7110-11e6-8dae-cb0c8df557bf.png)

Author: Junyang Qian <junyangq@databricks.com>

Closes #14942 from junyangq/fixSparkRSessionDoc.
---
 R/pkg/R/functions.R | 3 ++-
 R/pkg/R/group.R     | 2 +-
 R/pkg/R/sparkR.R    | 6 ++++--
 3 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/R/pkg/R/functions.R b/R/pkg/R/functions.R
index 369b1d00d9..ceedbe7671 100644
--- a/R/pkg/R/functions.R
+++ b/R/pkg/R/functions.R
@@ -444,7 +444,8 @@ setMethod("cosh",
 
 #' Returns the number of items in a group
 #'
-#' Returns the number of items in a group. This is a column aggregate function.
+#' This can be used as a column aggregate function with \code{Column} as input,
+#' and returns the number of items in a group.
 #'
 #' @rdname count
 #' @name count
diff --git a/R/pkg/R/group.R b/R/pkg/R/group.R
index e3479ef5fa..17f5283abe 100644
--- a/R/pkg/R/group.R
+++ b/R/pkg/R/group.R
@@ -57,7 +57,7 @@ setMethod("show", "GroupedData",
 
 #' Count
 #'
-#' Count the number of rows for each group.
+#' Count the number of rows for each group when the input is a \code{GroupedData}.
 #' The resulting SparkDataFrame will also contain the grouping columns.
 #'
 #' @return A SparkDataFrame.
diff --git a/R/pkg/R/sparkR.R b/R/pkg/R/sparkR.R
index de53b0bf79..15afe01c24 100644
--- a/R/pkg/R/sparkR.R
+++ b/R/pkg/R/sparkR.R
@@ -314,8 +314,10 @@ sparkRHive.init <- function(jsc = NULL) {
 
 #' Get the existing SparkSession or initialize a new SparkSession.
 #'
-#' Additional Spark properties can be set (...), and these named parameters take priority over
-#' over values in master, appName, named lists of sparkConfig.
+#' SparkSession is the entry point into SparkR. \code{sparkR.session} gets the existing
+#' SparkSession or initializes a new SparkSession.
+#' Additional Spark properties can be set in \code{...}, and these named parameters take priority
+#' over values in \code{master}, \code{appName}, or named lists of \code{sparkConfig}.
 #'
 #' For details on how to initialize and use SparkR, refer to SparkR programming guide at
 #' \url{http://spark.apache.org/docs/latest/sparkr.html#starting-up-sparksession}.
-- 
GitLab