diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
index 28aa249888b300c56d33f8146610190ff99c8235..cd242d78a41235c9e6cd39c883571ab3676d2e2c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.sql.catalyst.analysis
 
 import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.catalyst.catalog.SimpleCatalogRelation
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
 import org.apache.spark.sql.catalyst.plans.UsingJoin
@@ -305,6 +306,22 @@ trait CheckAnalysis extends PredicateHelper {
                  |Conflicting attributes: ${conflictingAttributes.mkString(",")}
                """.stripMargin)
 
+          case s: SimpleCatalogRelation =>
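+            // Reading a SimpleCatalogRelation is not supported unless Hive support is enabled.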
+            failAnalysis(
+              s"""
+                 |Please enable Hive support when selecting from regular tables:
+                 |${s.catalogTable.identifier}
+               """.stripMargin)
+
+          case InsertIntoTable(s: SimpleCatalogRelation, _, _, _, _) =>
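+            // Inserting into a SimpleCatalogRelation is likewise unsupported without Hive.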
+            failAnalysis(
+              s"""
+                 |Please enable Hive support when inserting into regular tables:
+                 |${s.catalogTable.identifier}
+               """.stripMargin)
+
           case o if !o.resolved =>
             failAnalysis(
               s"unresolved operator ${operator.simpleString}")
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
index b1b3e00de14564f3cc9e738b74a05953241ba9ed..ca0096eeb20870703932c11574ac4b91bcf02ed0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
@@ -377,7 +377,9 @@ case class InsertIntoTable(
   }
 
   assert(overwrite || !ifNotExists)
-  override lazy val resolved: Boolean = childrenResolved && expectedColumns.forall { expected =>
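+  // The target table must also be resolved before its schema is compared with the query output.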
+  override lazy val resolved: Boolean =
+    childrenResolved && table.resolved && expectedColumns.forall { expected =>
     child.output.size == expected.size && child.output.zip(expected).forall {
       case (childAttr, tableAttr) =>
         DataType.equalsIgnoreCompatibleNullability(childAttr.dataType, tableAttr.dataType)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index d72dc092e277c7efad68e5b0d6cc4b160f338395..64f5a4ac479a750abf89ee9088095a77eee33256 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -22,6 +22,7 @@ import java.io.File
 import org.apache.hadoop.fs.Path
 import org.scalatest.BeforeAndAfterEach
 
+import org.apache.spark.internal.config._
 import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.analysis.{DatabaseAlreadyExistsException, NoSuchPartitionException, NoSuchTableException}
@@ -1044,6 +1045,55 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
     )
   }
 
+  test("select/insert into the managed table") {
+    assume(spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "in-memory")
+    val tabName = "tbl"
+    withTable(tabName) {
+      sql(s"CREATE TABLE $tabName (i INT, j STRING)")
+      val catalogTable =
+        spark.sessionState.catalog.getTableMetadata(TableIdentifier(tabName, Some("default")))
+      assert(catalogTable.tableType == CatalogTableType.MANAGED)
+
+      var message = intercept[AnalysisException] {
+        sql(s"INSERT OVERWRITE TABLE $tabName SELECT 1, 'a'")
+      }.getMessage
+      assert(message.contains("Please enable Hive support when inserting the regular tables"))
+      message = intercept[AnalysisException] {
+        sql(s"SELECT * FROM $tabName")
+      }.getMessage
+      assert(message.contains("Please enable Hive support when selecting the regular tables"))
+    }
+  }
+
+  test("select/insert into external table") {
+    assume(spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "in-memory")
+    withTempDir { tempDir =>
+      val tabName = "tbl"
+      withTable(tabName) {
+        sql(
+          s"""
+             |CREATE EXTERNAL TABLE $tabName (i INT, j STRING)
+             |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
+             |LOCATION '$tempDir'
+           """.stripMargin)
+        val catalogTable =
+          spark.sessionState.catalog.getTableMetadata(TableIdentifier(tabName, Some("default")))
+        assert(catalogTable.tableType == CatalogTableType.EXTERNAL)
+
+        var message = intercept[AnalysisException] {
+          sql(s"INSERT OVERWRITE TABLE $tabName SELECT 1, 'a'")
+        }.getMessage
+        assert(message.contains("Please enable Hive support when inserting the regular tables"))
+        message = intercept[AnalysisException] {
+          sql(s"SELECT * FROM $tabName")
+        }.getMessage
+        assert(message.contains("Please enable Hive support when selecting the regular tables"))
+      }
+    }
+  }
+
   test("drop default database") {
     Seq("true", "false").foreach { caseSensitive =>
       withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive) {