diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index fabab32592af996353fc5b40b4bffc2d6839c3e8..00c3db0aac1ac327946e6419cc224c18e230ca7b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -491,7 +491,7 @@ class SessionCatalog(
     // If the database is defined, this is definitely not a temp table.
     // If the database is not defined, there is a good chance this is a temp table.
     if (name.database.isEmpty) {
-      tempTables.get(name.table).foreach(_.refresh())
+      tempTables.get(formatTableName(name.table)).foreach(_.refresh())
     }
   }
 
@@ -508,7 +508,7 @@ class SessionCatalog(
    * For testing only.
    */
   private[catalog] def getTempTable(name: String): Option[LogicalPlan] = synchronized {
-    tempTables.get(name)
+    tempTables.get(formatTableName(name))
   }
 
   // ----------------------------------------------------------------------------
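Both lookups fixed above read tempTables, a map that createTempView keys by the formatted table name. A minimal sketch of the assumed normalization, not the exact Spark source:

    // Assumption: formatTableName lower-cases the identifier unless
    // spark.sql.caseSensitive is true, matching how createTempView
    // keys the tempTables map.
    protected[this] def formatTableName(name: String): String = {
      if (conf.caseSensitiveAnalysis) name else name.toLowerCase
    }

Under that assumption, the previous unformatted lookups missed entries whenever the caller passed a differently-cased name, so refreshTable silently became a no-op for temporary tables in case-insensitive sessions.
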
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index c3e3b215bbc589dbb5a66f285939b2340aa836bb..2a452f4379afba94f928795a77fc904c9ef0efd1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -1212,7 +1212,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
    *
    * For example:
    * {{{
-   *   CREATE [TEMPORARY] VIEW [IF NOT EXISTS] [db_name.]view_name
+   *   CREATE [OR REPLACE] [TEMPORARY] VIEW [IF NOT EXISTS] [db_name.]view_name
    *   [(column_name [COMMENT column_comment], ...) ]
    *   [COMMENT view_comment]
    *   [TBLPROPERTIES (property_name = property_value, ...)]
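The corrected doc string now matches the grammar, which already accepted OR REPLACE. A hedged usage sketch (the view name and queries are illustrative, not from this patch):

    // Illustrative only: the second statement replaces the existing
    // definition instead of failing with a "view already exists" error.
    spark.sql("CREATE TEMPORARY VIEW v AS SELECT 1 AS id")
    spark.sql("CREATE OR REPLACE TEMPORARY VIEW v AS SELECT 2 AS id")
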
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/MetadataCacheSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/MetadataCacheSuite.scala
index eacf254cd183ddc2b9ba954646fff7d3d70f7c02..98aa447fc0560f428c0b8dda6a35ec02b1285672 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/MetadataCacheSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/MetadataCacheSuite.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql
 import java.io.File
 
 import org.apache.spark.SparkException
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SharedSQLContext
 
 /**
@@ -85,4 +86,28 @@ class MetadataCacheSuite extends QueryTest with SharedSQLContext {
       assert(newCount > 0 && newCount < 100)
     }}
   }
+
+  test("case sensitivity support in temporary view refresh") {
+    withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
+      withTempView("view_refresh") {
+        withTempPath { (location: File) =>
+          // Create a Parquet directory
+          spark.range(start = 0, end = 100, step = 1, numPartitions = 3)
+            .write.parquet(location.getAbsolutePath)
+
+          // Read the directory back in as a temporary view
+          spark.read.parquet(location.getAbsolutePath).createOrReplaceTempView("view_refresh")
+
+          // Delete a file
+          deleteOneFileInDirectory(location)
+          intercept[SparkException](sql("select count(*) from view_refresh").first())
+
+          // Refresh with a differently-cased name; the view should be readable again.
+          spark.catalog.refreshTable("vIeW_reFrEsH")
+          val newCount = sql("select count(*) from view_refresh").first().getLong(0)
+          assert(newCount > 0 && newCount < 100)
+        }
+      }
+    }
+  }
 }
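
The new test exercises the fix end to end: the view is registered under its formatted key, then refreshed through spark.catalog.refreshTable with a mixed-case name. A hypothetical spot-check along the same lines, assuming a session with spark.sql.caseSensitive=false:

    // Hypothetical: both spellings should resolve to the same temp view
    // entry once the lookup applies formatTableName.
    spark.range(1).createOrReplaceTempView("myView")
    spark.catalog.refreshTable("myview")
    spark.catalog.refreshTable("MYVIEW")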