diff --git a/R/pkg/NAMESPACE b/R/pkg/NAMESPACE
index ba386da2aa178636207644dacd8388017b75d0a6..a8cf53fd46c2ec519c39f0c52353677ab57d3825 100644
--- a/R/pkg/NAMESPACE
+++ b/R/pkg/NAMESPACE
@@ -45,6 +45,7 @@ exportMethods("arrange",
               "corr",
               "covar_samp",
               "covar_pop",
+              "createOrReplaceTempView",
               "crosstab",
               "dapply",
               "dapplyCollect",
@@ -80,7 +81,6 @@ exportMethods("arrange",
               "persist",
               "printSchema",
               "rbind",
-              "registerTempTable",
               "rename",
               "repartition",
               "sample",
diff --git a/R/pkg/R/DataFrame.R b/R/pkg/R/DataFrame.R
index 30a567523fa56f780252206c65611b8a023c93df..0ff350d44d4b3160d07f3b0a1c56e491243b2172 100644
--- a/R/pkg/R/DataFrame.R
+++ b/R/pkg/R/DataFrame.R
@@ -428,16 +428,17 @@ setMethod("coltypes<-",
             dataFrame(nx@sdf)
           })
 
-#' Register Temporary Table
+#' Creates a temporary view using the given name.
 #'
-#' Registers a SparkDataFrame as a Temporary Table in the SQLContext
+#' Creates a new temporary view using a SparkDataFrame in the SQLContext. If a
+#' temporary view with the same name already exists, replaces it.
 #'
 #' @param x A SparkDataFrame
-#' @param tableName A character vector containing the name of the table
+#' @param viewName A character vector containing the name of the view
 #'
 #' @family SparkDataFrame functions
-#' @rdname registerTempTable
-#' @name registerTempTable
+#' @rdname createOrReplaceTempView
+#' @name createOrReplaceTempView
 #' @export
 #' @examples
 #'\dontrun{
@@ -445,13 +446,13 @@ setMethod("coltypes<-",
 #' sqlContext <- sparkRSQL.init(sc)
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
-#' registerTempTable(df, "json_df")
+#' createOrReplaceTempView(df, "json_df")
 #' new_df <- sql("SELECT * FROM json_df")
 #'}
-setMethod("registerTempTable",
-          signature(x = "SparkDataFrame", tableName = "character"),
-          function(x, tableName) {
-              invisible(callJMethod(x@sdf, "registerTempTable", tableName))
+setMethod("createOrReplaceTempView",
+          signature(x = "SparkDataFrame", viewName = "character"),
+          function(x, viewName) {
+              invisible(callJMethod(x@sdf, "createOrReplaceTempView", viewName))
           })
 
 #' insertInto
@@ -473,7 +474,7 @@ setMethod("registerTempTable",
 #' sqlContext <- sparkRSQL.init(sc)
 #' df <- read.df(path, "parquet")
 #' df2 <- read.df(path2, "parquet")
-#' registerTempTable(df, "table1")
+#' createOrReplaceTempView(df, "table1")
 #' insertInto(df2, "table1", overwrite = TRUE)
 #'}
 setMethod("insertInto",
diff --git a/R/pkg/R/SQLContext.R b/R/pkg/R/SQLContext.R
index e7e9e353f9e82179e1f1a6a79255b8b2fdfb0e17..914b02a47ad67641452464ebb0af7be8f59eb8de 100644
--- a/R/pkg/R/SQLContext.R
+++ b/R/pkg/R/SQLContext.R
@@ -411,7 +411,7 @@ read.text <- function(x, ...) {
 #' sqlContext <- sparkRSQL.init(sc)
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
-#' registerTempTable(df, "table")
+#' createOrReplaceTempView(df, "table")
 #' new_df <- sql("SELECT * FROM table")
 #' }
 #' @name sql
@@ -443,7 +443,7 @@ sql <- function(x, ...) {
 #' sqlContext <- sparkRSQL.init(sc)
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
-#' registerTempTable(df, "table")
+#' createOrReplaceTempView(df, "table")
 #' new_df <- tableToDF("table")
 #' }
 #' @note since 2.0.0
@@ -529,7 +529,7 @@ tableNames <- function(x, ...) {
 #' sqlContext <- sparkRSQL.init(sc)
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
-#' registerTempTable(df, "table")
+#' createOrReplaceTempView(df, "table")
 #' cacheTable("table")
 #' }
 #' @name cacheTable
@@ -558,7 +558,7 @@ cacheTable <- function(x, ...) {
 #' sqlContext <- sparkRSQL.init(sc)
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
-#' registerTempTable(df, "table")
+#' createOrReplaceTempView(df, "table")
 #' uncacheTable("table")
 #' }
 #' @name uncacheTable
@@ -608,7 +608,7 @@ clearCache <- function() {
 #' sc <- sparkR.init()
 #' sqlContext <- sparkRSQL.init(sc)
 #' df <- read.df(path, "parquet")
-#' registerTempTable(df, "table")
+#' createOrReplaceTempView(df, "table")
 #' dropTempTable("table")
 #' }
 #' @name dropTempTable
diff --git a/R/pkg/R/generics.R b/R/pkg/R/generics.R
index f0cde56b133f5ffa50a523e02edfaf1c1bd2e26f..50fc204f998a50db76bd8bcdbb1b2cfceae38f0c 100644
--- a/R/pkg/R/generics.R
+++ b/R/pkg/R/generics.R
@@ -547,9 +547,12 @@ setGeneric("printSchema", function(x) { standardGeneric("printSchema") })
 #' @export
 setGeneric("rename", function(x, ...) { standardGeneric("rename") })
 
-#' @rdname registerTempTable
+#' @rdname createOrReplaceTempView
 #' @export
-setGeneric("registerTempTable", function(x, tableName) { standardGeneric("registerTempTable") })
+setGeneric("createOrReplaceTempView",
+           function(x, viewName) {
+             standardGeneric("createOrReplaceTempView")
+           })
 
 #' @rdname sample
 #' @export
diff --git a/R/pkg/inst/tests/testthat/test_sparkSQL.R b/R/pkg/inst/tests/testthat/test_sparkSQL.R
index 375cb6f58808d421f7ba026734e4c1bd00a5137c..d1ca3b726fe0b2a93494cf9d2da14e6502481fe9 100644
--- a/R/pkg/inst/tests/testthat/test_sparkSQL.R
+++ b/R/pkg/inst/tests/testthat/test_sparkSQL.R
@@ -445,7 +445,7 @@ test_that("jsonRDD() on a RDD with json string", {
 
 test_that("test cache, uncache and clearCache", {
   df <- read.json(jsonPath)
-  registerTempTable(df, "table1")
+  createOrReplaceTempView(df, "table1")
   cacheTable("table1")
   uncacheTable("table1")
   clearCache()
@@ -454,16 +454,17 @@ test_that("test cache, uncache and clearCache", {
 
 test_that("test tableNames and tables", {
   df <- read.json(jsonPath)
-  registerTempTable(df, "table1")
+  createOrReplaceTempView(df, "table1")
   expect_equal(length(tableNames()), 1)
   df <- tables()
   expect_equal(count(df), 1)
   dropTempTable("table1")
 })
 
-test_that("registerTempTable() results in a queryable table and sql() results in a new DataFrame", {
+test_that(
+  "createOrReplaceTempView() results in a queryable table and sql() results in a new DataFrame", {
   df <- read.json(jsonPath)
-  registerTempTable(df, "table1")
+  createOrReplaceTempView(df, "table1")
   newdf <- sql("SELECT * FROM table1 where name = 'Michael'")
   expect_is(newdf, "SparkDataFrame")
   expect_equal(count(newdf), 1)
@@ -484,13 +485,13 @@ test_that("insertInto() on a registered table", {
   write.df(df2, parquetPath2, "parquet", "overwrite")
   dfParquet2 <- read.df(parquetPath2, "parquet")
 
-  registerTempTable(dfParquet, "table1")
+  createOrReplaceTempView(dfParquet, "table1")
   insertInto(dfParquet2, "table1")
   expect_equal(count(sql("select * from table1")), 5)
   expect_equal(first(sql("select * from table1 order by age"))$name, "Michael")
   dropTempTable("table1")
 
-  registerTempTable(dfParquet, "table1")
+  createOrReplaceTempView(dfParquet, "table1")
   insertInto(dfParquet2, "table1", overwrite = TRUE)
   expect_equal(count(sql("select * from table1")), 2)
   expect_equal(first(sql("select * from table1 order by age"))$name, "Bob")
@@ -502,7 +503,7 @@ test_that("insertInto() on a registered table", {
 
 test_that("tableToDF() returns a new DataFrame", {
   df <- read.json(jsonPath)
-  registerTempTable(df, "table1")
+  createOrReplaceTempView(df, "table1")
   tabledf <- tableToDF("table1")
   expect_is(tabledf, "SparkDataFrame")
   expect_equal(count(tabledf), 3)