diff --git a/R/pkg/NAMESPACE b/R/pkg/NAMESPACE
index 255be2e76ff49345caf95c3ed65c6ef48478505f..95d949ee3e5a4617a4f1a373ffb2ed6225cf2594 100644
--- a/R/pkg/NAMESPACE
+++ b/R/pkg/NAMESPACE
@@ -107,6 +107,7 @@ exportMethods("%in%",
               "cbrt",
               "ceil",
               "ceiling",
+              "column",
               "concat",
               "concat_ws",
               "contains",
diff --git a/R/pkg/R/column.R b/R/pkg/R/column.R
index 42e9d12179db7332c2ef6a77ebeb4a0a164927df..20de3907b7dd967b9e8913808d44072ae109cc0f 100644
--- a/R/pkg/R/column.R
+++ b/R/pkg/R/column.R
@@ -36,13 +36,11 @@ setMethod("initialize", "Column", function(.Object, jc) {
   .Object
 })
 
-column <- function(jc) {
-  new("Column", jc)
-}
-
-col <- function(x) {
-  column(callJStatic("org.apache.spark.sql.functions", "col", x))
-}
+setMethod("column",
+          signature(x = "jobj"),
+          function(x) {
+            new("Column", x)
+          })
 
 #' @rdname show
 #' @name show
diff --git a/R/pkg/R/functions.R b/R/pkg/R/functions.R
index 94687edb054428f05a91dda8d46734fe91552941..a220ad8b9f58b96f04be2b8e69617d2069c2523c 100644
--- a/R/pkg/R/functions.R
+++ b/R/pkg/R/functions.R
@@ -233,6 +233,28 @@ setMethod("ceil",
             column(jc)
           })
 
+#' Though Scala functions have a "col" function, we don't expose it in SparkR
+#' because we don't want to conflict with the "col" function in the R base
+#' package, and we also export a "column" function which is an alias of "col".
+col <- function(x) {
+  column(callJStatic("org.apache.spark.sql.functions", "col", x))
+}
+
+#' column
+#'
+#' Returns a Column based on the given column name.
+#'
+#' @rdname col
+#' @name column
+#' @family normal_funcs
+#' @export
+#' @examples \dontrun{column("name")}
+setMethod("column",
+          signature(x = "character"),
+          function(x) {
+            col(x)
+          })
+
 #' cos
 #'
 #' Computes the cosine of the given value.
diff --git a/R/pkg/R/generics.R b/R/pkg/R/generics.R
index c4474131804bbbef1216c511dee123f28f871471..8fad17026c06f2044388a17e24d8ab5b8fd2a454 100644
--- a/R/pkg/R/generics.R
+++ b/R/pkg/R/generics.R
@@ -686,6 +686,10 @@ setGeneric("cbrt", function(x) { standardGeneric("cbrt") })
 #' @export
 setGeneric("ceil", function(x) { standardGeneric("ceil") })
 
+#' @rdname col
+#' @export
+setGeneric("column", function(x) { standardGeneric("column") })
+
 #' @rdname concat
 #' @export
 setGeneric("concat", function(x, ...) { standardGeneric("concat") })
diff --git a/R/pkg/inst/tests/test_sparkSQL.R b/R/pkg/inst/tests/test_sparkSQL.R
index 4804ecf1773410010fecf89277c05fdf0b307528..3a04edbb4c1163b722eb74c88bb1b31887b63884 100644
--- a/R/pkg/inst/tests/test_sparkSQL.R
+++ b/R/pkg/inst/tests/test_sparkSQL.R
@@ -787,7 +787,7 @@ test_that("test HiveContext", {
 })
 
 test_that("column operators", {
-  c <- SparkR:::col("a")
+  c <- column("a")
   c2 <- (- c + 1 - 2) * 3 / 4.0
   c3 <- (c + c2 - c2) * c2 %% c2
   c4 <- (c > c2) & (c2 <= c3) | (c == c2) & (c2 != c3)
@@ -795,7 +795,7 @@ test_that("column operators", {
 })
 
 test_that("column functions", {
-  c <- SparkR:::col("a")
+  c <- column("a")
   c1 <- abs(c) + acos(c) + approxCountDistinct(c) + ascii(c) + asin(c) + atan(c)
   c2 <- avg(c) + base64(c) + bin(c) + bitwiseNOT(c) + cbrt(c) + ceil(c) + cos(c)
   c3 <- cosh(c) + count(c) + crc32(c) + exp(c)