diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/CacheManager.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/CacheManager.scala
index f85aeb1b0269474960819169227908b13c485e29..293fcfe96e677f8a92a5cf9a20582097dc5592e3 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/CacheManager.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/CacheManager.scala
@@ -22,7 +22,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock
 import org.apache.spark.Logging
 import org.apache.spark.sql.DataFrame
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.columnar.InMemoryRelation
+import org.apache.spark.sql.execution.columnar.InMemoryRelation
 import org.apache.spark.storage.StorageLevel
 import org.apache.spark.storage.StorageLevel.MEMORY_AND_DISK
 
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
index 3d4ce633c07c98451dc7c5d8a226d2c15fb65222..f67c951bc0663cd16d22de9275717307657155ad 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.planning._
 import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.plans.logical.{BroadcastHint, LogicalPlan}
 import org.apache.spark.sql.catalyst.plans.physical._
-import org.apache.spark.sql.columnar.{InMemoryColumnarTableScan, InMemoryRelation}
+import org.apache.spark.sql.execution.columnar.{InMemoryColumnarTableScan, InMemoryRelation}
 import org.apache.spark.sql.execution.datasources.{CreateTableUsing, CreateTempTableUsing, DescribeCommand => LogicalDescribeCommand, _}
 import org.apache.spark.sql.execution.{DescribeCommand => RunnableDescribeCommand}
 import org.apache.spark.sql.{Strategy, execution}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnAccessor.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnAccessor.scala
similarity index 75%
rename from sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnAccessor.scala
rename to sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnAccessor.scala
index 42ec4d3433f16763e21ab1a0fdef7790a1f1eeaa..fee36f6023895798d7b36b3ff462fe6043e13483 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnAccessor.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnAccessor.scala
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
 
 import java.nio.{ByteBuffer, ByteOrder}
 
 import org.apache.spark.sql.catalyst.expressions.{MutableRow, UnsafeArrayData, UnsafeMapData, UnsafeRow}
-import org.apache.spark.sql.columnar.compression.CompressibleColumnAccessor
+import org.apache.spark.sql.execution.columnar.compression.CompressibleColumnAccessor
 import org.apache.spark.sql.types._
 
 /**
@@ -29,7 +29,7 @@ import org.apache.spark.sql.types._
  * a [[MutableRow]]. In this way, boxing cost can be avoided by leveraging the setter methods
  * for primitive values provided by [[MutableRow]].
  */
-private[sql] trait ColumnAccessor {
+private[columnar] trait ColumnAccessor {
   initialize()
 
   protected def initialize()
@@ -41,7 +41,7 @@ private[sql] trait ColumnAccessor {
   protected def underlyingBuffer: ByteBuffer
 }
 
-private[sql] abstract class BasicColumnAccessor[JvmType](
+private[columnar] abstract class BasicColumnAccessor[JvmType](
     protected val buffer: ByteBuffer,
     protected val columnType: ColumnType[JvmType])
   extends ColumnAccessor {
@@ -61,65 +61,65 @@ private[sql] abstract class BasicColumnAccessor[JvmType](
   protected def underlyingBuffer = buffer
 }
 
-private[sql] class NullColumnAccessor(buffer: ByteBuffer)
+private[columnar] class NullColumnAccessor(buffer: ByteBuffer)
   extends BasicColumnAccessor[Any](buffer, NULL)
   with NullableColumnAccessor
 
-private[sql] abstract class NativeColumnAccessor[T <: AtomicType](
+private[columnar] abstract class NativeColumnAccessor[T <: AtomicType](
     override protected val buffer: ByteBuffer,
     override protected val columnType: NativeColumnType[T])
   extends BasicColumnAccessor(buffer, columnType)
   with NullableColumnAccessor
   with CompressibleColumnAccessor[T]
 
-private[sql] class BooleanColumnAccessor(buffer: ByteBuffer)
+private[columnar] class BooleanColumnAccessor(buffer: ByteBuffer)
   extends NativeColumnAccessor(buffer, BOOLEAN)
 
-private[sql] class ByteColumnAccessor(buffer: ByteBuffer)
+private[columnar] class ByteColumnAccessor(buffer: ByteBuffer)
   extends NativeColumnAccessor(buffer, BYTE)
 
-private[sql] class ShortColumnAccessor(buffer: ByteBuffer)
+private[columnar] class ShortColumnAccessor(buffer: ByteBuffer)
   extends NativeColumnAccessor(buffer, SHORT)
 
-private[sql] class IntColumnAccessor(buffer: ByteBuffer)
+private[columnar] class IntColumnAccessor(buffer: ByteBuffer)
   extends NativeColumnAccessor(buffer, INT)
 
-private[sql] class LongColumnAccessor(buffer: ByteBuffer)
+private[columnar] class LongColumnAccessor(buffer: ByteBuffer)
   extends NativeColumnAccessor(buffer, LONG)
 
-private[sql] class FloatColumnAccessor(buffer: ByteBuffer)
+private[columnar] class FloatColumnAccessor(buffer: ByteBuffer)
   extends NativeColumnAccessor(buffer, FLOAT)
 
-private[sql] class DoubleColumnAccessor(buffer: ByteBuffer)
+private[columnar] class DoubleColumnAccessor(buffer: ByteBuffer)
   extends NativeColumnAccessor(buffer, DOUBLE)
 
-private[sql] class StringColumnAccessor(buffer: ByteBuffer)
+private[columnar] class StringColumnAccessor(buffer: ByteBuffer)
   extends NativeColumnAccessor(buffer, STRING)
 
-private[sql] class BinaryColumnAccessor(buffer: ByteBuffer)
+private[columnar] class BinaryColumnAccessor(buffer: ByteBuffer)
   extends BasicColumnAccessor[Array[Byte]](buffer, BINARY)
   with NullableColumnAccessor
 
-private[sql] class CompactDecimalColumnAccessor(buffer: ByteBuffer, dataType: DecimalType)
+private[columnar] class CompactDecimalColumnAccessor(buffer: ByteBuffer, dataType: DecimalType)
   extends NativeColumnAccessor(buffer, COMPACT_DECIMAL(dataType))
 
-private[sql] class DecimalColumnAccessor(buffer: ByteBuffer, dataType: DecimalType)
+private[columnar] class DecimalColumnAccessor(buffer: ByteBuffer, dataType: DecimalType)
   extends BasicColumnAccessor[Decimal](buffer, LARGE_DECIMAL(dataType))
   with NullableColumnAccessor
 
-private[sql] class StructColumnAccessor(buffer: ByteBuffer, dataType: StructType)
+private[columnar] class StructColumnAccessor(buffer: ByteBuffer, dataType: StructType)
   extends BasicColumnAccessor[UnsafeRow](buffer, STRUCT(dataType))
   with NullableColumnAccessor
 
-private[sql] class ArrayColumnAccessor(buffer: ByteBuffer, dataType: ArrayType)
+private[columnar] class ArrayColumnAccessor(buffer: ByteBuffer, dataType: ArrayType)
   extends BasicColumnAccessor[UnsafeArrayData](buffer, ARRAY(dataType))
   with NullableColumnAccessor
 
-private[sql] class MapColumnAccessor(buffer: ByteBuffer, dataType: MapType)
+private[columnar] class MapColumnAccessor(buffer: ByteBuffer, dataType: MapType)
   extends BasicColumnAccessor[UnsafeMapData](buffer, MAP(dataType))
   with NullableColumnAccessor
 
-private[sql] object ColumnAccessor {
+private[columnar] object ColumnAccessor {
   def apply(dataType: DataType, buffer: ByteBuffer): ColumnAccessor = {
     val buf = buffer.order(ByteOrder.nativeOrder)
 
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnBuilder.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnBuilder.scala
similarity index 74%
rename from sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnBuilder.scala
rename to sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnBuilder.scala
index 599f30f2d73b402f963e6be15205faaf54cb2fe0..7e26f19bb7449e4f880ebda5dd37e3c63ef85ddd 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnBuilder.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnBuilder.scala
@@ -15,16 +15,16 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
 
 import java.nio.{ByteBuffer, ByteOrder}
 
 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.columnar.ColumnBuilder._
-import org.apache.spark.sql.columnar.compression.{AllCompressionSchemes, CompressibleColumnBuilder}
+import org.apache.spark.sql.execution.columnar.ColumnBuilder._
+import org.apache.spark.sql.execution.columnar.compression.{AllCompressionSchemes, CompressibleColumnBuilder}
 import org.apache.spark.sql.types._
 
-private[sql] trait ColumnBuilder {
+private[columnar] trait ColumnBuilder {
   /**
    * Initializes with an approximate lower bound on the expected number of elements in this column.
    */
@@ -46,7 +46,7 @@ private[sql] trait ColumnBuilder {
   def build(): ByteBuffer
 }
 
-private[sql] class BasicColumnBuilder[JvmType](
+private[columnar] class BasicColumnBuilder[JvmType](
     val columnStats: ColumnStats,
     val columnType: ColumnType[JvmType])
   extends ColumnBuilder {
@@ -84,17 +84,17 @@ private[sql] class BasicColumnBuilder[JvmType](
   }
 }
 
-private[sql] class NullColumnBuilder
+private[columnar] class NullColumnBuilder
   extends BasicColumnBuilder[Any](new ObjectColumnStats(NullType), NULL)
   with NullableColumnBuilder
 
-private[sql] abstract class ComplexColumnBuilder[JvmType](
+private[columnar] abstract class ComplexColumnBuilder[JvmType](
     columnStats: ColumnStats,
     columnType: ColumnType[JvmType])
   extends BasicColumnBuilder[JvmType](columnStats, columnType)
   with NullableColumnBuilder
 
-private[sql] abstract class NativeColumnBuilder[T <: AtomicType](
+private[columnar] abstract class NativeColumnBuilder[T <: AtomicType](
     override val columnStats: ColumnStats,
     override val columnType: NativeColumnType[T])
   extends BasicColumnBuilder[T#InternalType](columnStats, columnType)
@@ -102,40 +102,45 @@ private[sql] abstract class NativeColumnBuilder[T <: AtomicType](
   with AllCompressionSchemes
   with CompressibleColumnBuilder[T]
 
-private[sql] class BooleanColumnBuilder extends NativeColumnBuilder(new BooleanColumnStats, BOOLEAN)
+private[columnar]
+class BooleanColumnBuilder extends NativeColumnBuilder(new BooleanColumnStats, BOOLEAN)
 
-private[sql] class ByteColumnBuilder extends NativeColumnBuilder(new ByteColumnStats, BYTE)
+private[columnar]
+class ByteColumnBuilder extends NativeColumnBuilder(new ByteColumnStats, BYTE)
 
-private[sql] class ShortColumnBuilder extends NativeColumnBuilder(new ShortColumnStats, SHORT)
+private[columnar] class ShortColumnBuilder extends NativeColumnBuilder(new ShortColumnStats, SHORT)
 
-private[sql] class IntColumnBuilder extends NativeColumnBuilder(new IntColumnStats, INT)
+private[columnar] class IntColumnBuilder extends NativeColumnBuilder(new IntColumnStats, INT)
 
-private[sql] class LongColumnBuilder extends NativeColumnBuilder(new LongColumnStats, LONG)
+private[columnar] class LongColumnBuilder extends NativeColumnBuilder(new LongColumnStats, LONG)
 
-private[sql] class FloatColumnBuilder extends NativeColumnBuilder(new FloatColumnStats, FLOAT)
+private[columnar] class FloatColumnBuilder extends NativeColumnBuilder(new FloatColumnStats, FLOAT)
 
-private[sql] class DoubleColumnBuilder extends NativeColumnBuilder(new DoubleColumnStats, DOUBLE)
+private[columnar]
+class DoubleColumnBuilder extends NativeColumnBuilder(new DoubleColumnStats, DOUBLE)
 
-private[sql] class StringColumnBuilder extends NativeColumnBuilder(new StringColumnStats, STRING)
+private[columnar]
+class StringColumnBuilder extends NativeColumnBuilder(new StringColumnStats, STRING)
 
-private[sql] class BinaryColumnBuilder extends ComplexColumnBuilder(new BinaryColumnStats, BINARY)
+private[columnar]
+class BinaryColumnBuilder extends ComplexColumnBuilder(new BinaryColumnStats, BINARY)
 
-private[sql] class CompactDecimalColumnBuilder(dataType: DecimalType)
+private[columnar] class CompactDecimalColumnBuilder(dataType: DecimalType)
   extends NativeColumnBuilder(new DecimalColumnStats(dataType), COMPACT_DECIMAL(dataType))
 
-private[sql] class DecimalColumnBuilder(dataType: DecimalType)
+private[columnar] class DecimalColumnBuilder(dataType: DecimalType)
   extends ComplexColumnBuilder(new DecimalColumnStats(dataType), LARGE_DECIMAL(dataType))
 
-private[sql] class StructColumnBuilder(dataType: StructType)
+private[columnar] class StructColumnBuilder(dataType: StructType)
   extends ComplexColumnBuilder(new ObjectColumnStats(dataType), STRUCT(dataType))
 
-private[sql] class ArrayColumnBuilder(dataType: ArrayType)
+private[columnar] class ArrayColumnBuilder(dataType: ArrayType)
   extends ComplexColumnBuilder(new ObjectColumnStats(dataType), ARRAY(dataType))
 
-private[sql] class MapColumnBuilder(dataType: MapType)
+private[columnar] class MapColumnBuilder(dataType: MapType)
   extends ComplexColumnBuilder(new ObjectColumnStats(dataType), MAP(dataType))
 
-private[sql] object ColumnBuilder {
+private[columnar] object ColumnBuilder {
   val DEFAULT_INITIAL_BUFFER_SIZE = 128 * 1024
   val MAX_BATCH_SIZE_IN_BYTE = 4 * 1024 * 1024L
 
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnStats.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnStats.scala
similarity index 88%
rename from sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnStats.scala
rename to sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnStats.scala
index 91a05650585cf7440d9cd8a343bc82183a9c098f..c52ee9ffd6d2affcf0fc69e220fcbcba51cf47b5 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnStats.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnStats.scala
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
 
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.{GenericInternalRow, Attribute, AttributeMap, AttributeReference}
 import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.types.UTF8String
 
-private[sql] class ColumnStatisticsSchema(a: Attribute) extends Serializable {
+private[columnar] class ColumnStatisticsSchema(a: Attribute) extends Serializable {
   val upperBound = AttributeReference(a.name + ".upperBound", a.dataType, nullable = true)()
   val lowerBound = AttributeReference(a.name + ".lowerBound", a.dataType, nullable = true)()
   val nullCount = AttributeReference(a.name + ".nullCount", IntegerType, nullable = false)()
@@ -32,7 +32,7 @@ private[sql] class ColumnStatisticsSchema(a: Attribute) extends Serializable {
   val schema = Seq(lowerBound, upperBound, nullCount, count, sizeInBytes)
 }
 
-private[sql] class PartitionStatistics(tableSchema: Seq[Attribute]) extends Serializable {
+private[columnar] class PartitionStatistics(tableSchema: Seq[Attribute]) extends Serializable {
   val (forAttribute, schema) = {
     val allStats = tableSchema.map(a => a -> new ColumnStatisticsSchema(a))
     (AttributeMap(allStats), allStats.map(_._2.schema).foldLeft(Seq.empty[Attribute])(_ ++ _))
@@ -45,10 +45,10 @@ private[sql] class PartitionStatistics(tableSchema: Seq[Attribute]) extends Seri
  * NOTE: we intentionally avoid using `Ordering[T]` to compare values here because `Ordering[T]`
  * brings significant performance penalty.
  */
-private[sql] sealed trait ColumnStats extends Serializable {
+private[columnar] sealed trait ColumnStats extends Serializable {
   protected var count = 0
   protected var nullCount = 0
-  private[sql] var sizeInBytes = 0L
+  private[columnar] var sizeInBytes = 0L
 
   /**
    * Gathers statistics information from `row(ordinal)`.
@@ -72,14 +72,14 @@ private[sql] sealed trait ColumnStats extends Serializable {
 /**
  * A no-op ColumnStats only used for testing purposes.
  */
-private[sql] class NoopColumnStats extends ColumnStats {
+private[columnar] class NoopColumnStats extends ColumnStats {
   override def gatherStats(row: InternalRow, ordinal: Int): Unit = super.gatherStats(row, ordinal)
 
   override def collectedStatistics: GenericInternalRow =
     new GenericInternalRow(Array[Any](null, null, nullCount, count, 0L))
 }
 
-private[sql] class BooleanColumnStats extends ColumnStats {
+private[columnar] class BooleanColumnStats extends ColumnStats {
   protected var upper = false
   protected var lower = true
 
@@ -97,7 +97,7 @@ private[sql] class BooleanColumnStats extends ColumnStats {
     new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
 }
 
-private[sql] class ByteColumnStats extends ColumnStats {
+private[columnar] class ByteColumnStats extends ColumnStats {
   protected var upper = Byte.MinValue
   protected var lower = Byte.MaxValue
 
@@ -115,7 +115,7 @@ private[sql] class ByteColumnStats extends ColumnStats {
     new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
 }
 
-private[sql] class ShortColumnStats extends ColumnStats {
+private[columnar] class ShortColumnStats extends ColumnStats {
   protected var upper = Short.MinValue
   protected var lower = Short.MaxValue
 
@@ -133,7 +133,7 @@ private[sql] class ShortColumnStats extends ColumnStats {
     new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
 }
 
-private[sql] class IntColumnStats extends ColumnStats {
+private[columnar] class IntColumnStats extends ColumnStats {
   protected var upper = Int.MinValue
   protected var lower = Int.MaxValue
 
@@ -151,7 +151,7 @@ private[sql] class IntColumnStats extends ColumnStats {
     new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
 }
 
-private[sql] class LongColumnStats extends ColumnStats {
+private[columnar] class LongColumnStats extends ColumnStats {
   protected var upper = Long.MinValue
   protected var lower = Long.MaxValue
 
@@ -169,7 +169,7 @@ private[sql] class LongColumnStats extends ColumnStats {
     new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
 }
 
-private[sql] class FloatColumnStats extends ColumnStats {
+private[columnar] class FloatColumnStats extends ColumnStats {
   protected var upper = Float.MinValue
   protected var lower = Float.MaxValue
 
@@ -187,7 +187,7 @@ private[sql] class FloatColumnStats extends ColumnStats {
     new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
 }
 
-private[sql] class DoubleColumnStats extends ColumnStats {
+private[columnar] class DoubleColumnStats extends ColumnStats {
   protected var upper = Double.MinValue
   protected var lower = Double.MaxValue
 
@@ -205,7 +205,7 @@ private[sql] class DoubleColumnStats extends ColumnStats {
     new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
 }
 
-private[sql] class StringColumnStats extends ColumnStats {
+private[columnar] class StringColumnStats extends ColumnStats {
   protected var upper: UTF8String = null
   protected var lower: UTF8String = null
 
@@ -223,7 +223,7 @@ private[sql] class StringColumnStats extends ColumnStats {
     new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
 }
 
-private[sql] class BinaryColumnStats extends ColumnStats {
+private[columnar] class BinaryColumnStats extends ColumnStats {
   override def gatherStats(row: InternalRow, ordinal: Int): Unit = {
     super.gatherStats(row, ordinal)
     if (!row.isNullAt(ordinal)) {
@@ -235,7 +235,7 @@ private[sql] class BinaryColumnStats extends ColumnStats {
     new GenericInternalRow(Array[Any](null, null, nullCount, count, sizeInBytes))
 }
 
-private[sql] class DecimalColumnStats(precision: Int, scale: Int) extends ColumnStats {
+private[columnar] class DecimalColumnStats(precision: Int, scale: Int) extends ColumnStats {
   def this(dt: DecimalType) = this(dt.precision, dt.scale)
 
   protected var upper: Decimal = null
@@ -256,7 +256,7 @@ private[sql] class DecimalColumnStats(precision: Int, scale: Int) extends Column
     new GenericInternalRow(Array[Any](lower, upper, nullCount, count, sizeInBytes))
 }
 
-private[sql] class ObjectColumnStats(dataType: DataType) extends ColumnStats {
+private[columnar] class ObjectColumnStats(dataType: DataType) extends ColumnStats {
   val columnType = ColumnType(dataType)
 
   override def gatherStats(row: InternalRow, ordinal: Int): Unit = {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnType.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnType.scala
similarity index 93%
rename from sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnType.scala
rename to sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnType.scala
index 68e509eb5047db971b1a464505d066164ddc713d..c9f2329db4b6da6d56eb8196c05bb0e6ad875330 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnType.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnType.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
 
 import java.math.{BigDecimal, BigInteger}
 import java.nio.ByteBuffer
@@ -41,7 +41,7 @@ import org.apache.spark.unsafe.types.UTF8String
  *
  * WARNNING: This only works with HeapByteBuffer
  */
-object ByteBufferHelper {
+private[columnar] object ByteBufferHelper {
   def getInt(buffer: ByteBuffer): Int = {
     val pos = buffer.position()
     buffer.position(pos + 4)
@@ -73,7 +73,7 @@ object ByteBufferHelper {
  *
  * @tparam JvmType Underlying Java type to represent the elements.
  */
-private[sql] sealed abstract class ColumnType[JvmType] {
+private[columnar] sealed abstract class ColumnType[JvmType] {
 
   // The catalyst data type of this column.
   def dataType: DataType
@@ -142,7 +142,7 @@ private[sql] sealed abstract class ColumnType[JvmType] {
   override def toString: String = getClass.getSimpleName.stripSuffix("$")
 }
 
-private[sql] object NULL extends ColumnType[Any] {
+private[columnar] object NULL extends ColumnType[Any] {
 
   override def dataType: DataType = NullType
   override def defaultSize: Int = 0
@@ -152,7 +152,7 @@ private[sql] object NULL extends ColumnType[Any] {
   override def getField(row: InternalRow, ordinal: Int): Any = null
 }
 
-private[sql] abstract class NativeColumnType[T <: AtomicType](
+private[columnar] abstract class NativeColumnType[T <: AtomicType](
     val dataType: T,
     val defaultSize: Int)
   extends ColumnType[T#InternalType] {
@@ -163,7 +163,7 @@ private[sql] abstract class NativeColumnType[T <: AtomicType](
   def scalaTag: TypeTag[dataType.InternalType] = dataType.tag
 }
 
-private[sql] object INT extends NativeColumnType(IntegerType, 4) {
+private[columnar] object INT extends NativeColumnType(IntegerType, 4) {
   override def append(v: Int, buffer: ByteBuffer): Unit = {
     buffer.putInt(v)
   }
@@ -192,7 +192,7 @@ private[sql] object INT extends NativeColumnType(IntegerType, 4) {
   }
 }
 
-private[sql] object LONG extends NativeColumnType(LongType, 8) {
+private[columnar] object LONG extends NativeColumnType(LongType, 8) {
   override def append(v: Long, buffer: ByteBuffer): Unit = {
     buffer.putLong(v)
   }
@@ -220,7 +220,7 @@ private[sql] object LONG extends NativeColumnType(LongType, 8) {
   }
 }
 
-private[sql] object FLOAT extends NativeColumnType(FloatType, 4) {
+private[columnar] object FLOAT extends NativeColumnType(FloatType, 4) {
   override def append(v: Float, buffer: ByteBuffer): Unit = {
     buffer.putFloat(v)
   }
@@ -248,7 +248,7 @@ private[sql] object FLOAT extends NativeColumnType(FloatType, 4) {
   }
 }
 
-private[sql] object DOUBLE extends NativeColumnType(DoubleType, 8) {
+private[columnar] object DOUBLE extends NativeColumnType(DoubleType, 8) {
   override def append(v: Double, buffer: ByteBuffer): Unit = {
     buffer.putDouble(v)
   }
@@ -276,7 +276,7 @@ private[sql] object DOUBLE extends NativeColumnType(DoubleType, 8) {
   }
 }
 
-private[sql] object BOOLEAN extends NativeColumnType(BooleanType, 1) {
+private[columnar] object BOOLEAN extends NativeColumnType(BooleanType, 1) {
   override def append(v: Boolean, buffer: ByteBuffer): Unit = {
     buffer.put(if (v) 1: Byte else 0: Byte)
   }
@@ -302,7 +302,7 @@ private[sql] object BOOLEAN extends NativeColumnType(BooleanType, 1) {
   }
 }
 
-private[sql] object BYTE extends NativeColumnType(ByteType, 1) {
+private[columnar] object BYTE extends NativeColumnType(ByteType, 1) {
   override def append(v: Byte, buffer: ByteBuffer): Unit = {
     buffer.put(v)
   }
@@ -330,7 +330,7 @@ private[sql] object BYTE extends NativeColumnType(ByteType, 1) {
   }
 }
 
-private[sql] object SHORT extends NativeColumnType(ShortType, 2) {
+private[columnar] object SHORT extends NativeColumnType(ShortType, 2) {
   override def append(v: Short, buffer: ByteBuffer): Unit = {
     buffer.putShort(v)
   }
@@ -362,7 +362,7 @@ private[sql] object SHORT extends NativeColumnType(ShortType, 2) {
  * A fast path to copy var-length bytes between ByteBuffer and UnsafeRow without creating wrapper
  * objects.
  */
-private[sql] trait DirectCopyColumnType[JvmType] extends ColumnType[JvmType] {
+private[columnar] trait DirectCopyColumnType[JvmType] extends ColumnType[JvmType] {
 
   // copy the bytes from ByteBuffer to UnsafeRow
   override def extract(buffer: ByteBuffer, row: MutableRow, ordinal: Int): Unit = {
@@ -387,7 +387,7 @@ private[sql] trait DirectCopyColumnType[JvmType] extends ColumnType[JvmType] {
   }
 }
 
-private[sql] object STRING
+private[columnar] object STRING
   extends NativeColumnType(StringType, 8) with DirectCopyColumnType[UTF8String] {
 
   override def actualSize(row: InternalRow, ordinal: Int): Int = {
@@ -425,7 +425,7 @@ private[sql] object STRING
   override def clone(v: UTF8String): UTF8String = v.clone()
 }
 
-private[sql] case class COMPACT_DECIMAL(precision: Int, scale: Int)
+private[columnar] case class COMPACT_DECIMAL(precision: Int, scale: Int)
   extends NativeColumnType(DecimalType(precision, scale), 8) {
 
   override def extract(buffer: ByteBuffer): Decimal = {
@@ -467,13 +467,13 @@ private[sql] case class COMPACT_DECIMAL(precision: Int, scale: Int)
   }
 }
 
-private[sql] object COMPACT_DECIMAL {
+private[columnar] object COMPACT_DECIMAL {
   def apply(dt: DecimalType): COMPACT_DECIMAL = {
     COMPACT_DECIMAL(dt.precision, dt.scale)
   }
 }
 
-private[sql] sealed abstract class ByteArrayColumnType[JvmType](val defaultSize: Int)
+private[columnar] sealed abstract class ByteArrayColumnType[JvmType](val defaultSize: Int)
   extends ColumnType[JvmType] with DirectCopyColumnType[JvmType] {
 
   def serialize(value: JvmType): Array[Byte]
@@ -492,7 +492,7 @@ private[sql] sealed abstract class ByteArrayColumnType[JvmType](val defaultSize:
   }
 }
 
-private[sql] object BINARY extends ByteArrayColumnType[Array[Byte]](16) {
+private[columnar] object BINARY extends ByteArrayColumnType[Array[Byte]](16) {
 
   def dataType: DataType = BinaryType
 
@@ -512,7 +512,7 @@ private[sql] object BINARY extends ByteArrayColumnType[Array[Byte]](16) {
   def deserialize(bytes: Array[Byte]): Array[Byte] = bytes
 }
 
-private[sql] case class LARGE_DECIMAL(precision: Int, scale: Int)
+private[columnar] case class LARGE_DECIMAL(precision: Int, scale: Int)
   extends ByteArrayColumnType[Decimal](12) {
 
   override val dataType: DataType = DecimalType(precision, scale)
@@ -539,13 +539,13 @@ private[sql] case class LARGE_DECIMAL(precision: Int, scale: Int)
   }
 }
 
-private[sql] object LARGE_DECIMAL {
+private[columnar] object LARGE_DECIMAL {
   def apply(dt: DecimalType): LARGE_DECIMAL = {
     LARGE_DECIMAL(dt.precision, dt.scale)
   }
 }
 
-private[sql] case class STRUCT(dataType: StructType)
+private[columnar] case class STRUCT(dataType: StructType)
   extends ColumnType[UnsafeRow] with DirectCopyColumnType[UnsafeRow] {
 
   private val numOfFields: Int = dataType.fields.size
@@ -586,7 +586,7 @@ private[sql] case class STRUCT(dataType: StructType)
   override def clone(v: UnsafeRow): UnsafeRow = v.copy()
 }
 
-private[sql] case class ARRAY(dataType: ArrayType)
+private[columnar] case class ARRAY(dataType: ArrayType)
   extends ColumnType[UnsafeArrayData] with DirectCopyColumnType[UnsafeArrayData] {
 
   override def defaultSize: Int = 16
@@ -625,7 +625,7 @@ private[sql] case class ARRAY(dataType: ArrayType)
   override def clone(v: UnsafeArrayData): UnsafeArrayData = v.copy()
 }
 
-private[sql] case class MAP(dataType: MapType)
+private[columnar] case class MAP(dataType: MapType)
   extends ColumnType[UnsafeMapData] with DirectCopyColumnType[UnsafeMapData] {
 
   override def defaultSize: Int = 32
@@ -663,7 +663,7 @@ private[sql] case class MAP(dataType: MapType)
   override def clone(v: UnsafeMapData): UnsafeMapData = v.copy()
 }
 
-private[sql] object ColumnType {
+private[columnar] object ColumnType {
   def apply(dataType: DataType): ColumnType[_] = {
     dataType match {
       case NullType => NULL
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/GenerateColumnAccessor.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/GenerateColumnAccessor.scala
similarity index 98%
rename from sql/core/src/main/scala/org/apache/spark/sql/columnar/GenerateColumnAccessor.scala
rename to sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/GenerateColumnAccessor.scala
index ff9393b465b7a6a07c3c1c7e8b873faad8333559..eaafc96e4d2e72203f4a37f798a04c231163b930 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/GenerateColumnAccessor.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/GenerateColumnAccessor.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
 
 import org.apache.spark.Logging
 import org.apache.spark.sql.catalyst.InternalRow
@@ -121,7 +121,7 @@ object GenerateColumnAccessor extends CodeGenerator[Seq[DataType], ColumnarItera
       import org.apache.spark.sql.types.DataType;
       import org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder;
       import org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter;
-      import org.apache.spark.sql.columnar.MutableUnsafeRow;
+      import org.apache.spark.sql.execution.columnar.MutableUnsafeRow;
 
       public SpecificColumnarIterator generate($exprType[] expr) {
         return new SpecificColumnarIterator();
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/InMemoryColumnarTableScan.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarTableScan.scala
similarity index 98%
rename from sql/core/src/main/scala/org/apache/spark/sql/columnar/InMemoryColumnarTableScan.scala
rename to sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarTableScan.scala
index ae77298e6da2fb764b6a7631404e7f5477441cfa..ce701fb3a7f28a00a87bdcf17e8ee32a07ab513e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/InMemoryColumnarTableScan.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarTableScan.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
 
 import scala.collection.mutable.ArrayBuffer
 
@@ -50,7 +50,8 @@ private[sql] object InMemoryRelation {
  * @param buffers The buffers for serialized columns
  * @param stats The stat of columns
  */
-private[sql] case class CachedBatch(numRows: Int, buffers: Array[Array[Byte]], stats: InternalRow)
+private[columnar]
+case class CachedBatch(numRows: Int, buffers: Array[Array[Byte]], stats: InternalRow)
 
 private[sql] case class InMemoryRelation(
     output: Seq[Attribute],
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/NullableColumnAccessor.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/NullableColumnAccessor.scala
similarity index 94%
rename from sql/core/src/main/scala/org/apache/spark/sql/columnar/NullableColumnAccessor.scala
rename to sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/NullableColumnAccessor.scala
index 7eaecfe047c3f04b51a78416e29b660d62308a4a..8d99546924de110b88dff65e187fa5e3cb7646b7 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/NullableColumnAccessor.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/NullableColumnAccessor.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
 
 import java.nio.{ByteOrder, ByteBuffer}
 
 import org.apache.spark.sql.catalyst.expressions.MutableRow
 
-private[sql] trait NullableColumnAccessor extends ColumnAccessor {
+private[columnar] trait NullableColumnAccessor extends ColumnAccessor {
   private var nullsBuffer: ByteBuffer = _
   private var nullCount: Int = _
   private var seenNulls: Int = 0
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/NullableColumnBuilder.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/NullableColumnBuilder.scala
similarity index 95%
rename from sql/core/src/main/scala/org/apache/spark/sql/columnar/NullableColumnBuilder.scala
rename to sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/NullableColumnBuilder.scala
index 76cfddf1cd01aba3afe1dad6113ae79d2c2a2451..3a1931bfb5c8463de66b55f1bf0b7b6a93890ca3 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/NullableColumnBuilder.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/NullableColumnBuilder.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
 
 import java.nio.{ByteBuffer, ByteOrder}
 
@@ -34,7 +34,7 @@ import org.apache.spark.sql.catalyst.InternalRow
  *   +---+-----+---------+
  * }}}
  */
-private[sql] trait NullableColumnBuilder extends ColumnBuilder {
+private[columnar] trait NullableColumnBuilder extends ColumnBuilder {
   protected var nulls: ByteBuffer = _
   protected var nullCount: Int = _
   private var pos: Int = _
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/CompressibleColumnAccessor.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressibleColumnAccessor.scala
similarity index 84%
rename from sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/CompressibleColumnAccessor.scala
rename to sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressibleColumnAccessor.scala
index cb205defbb1ad6934fd6df9135ec5ac90aff93f2..6579b5068e65a31734841ea2499bd3e0779f38b9 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/CompressibleColumnAccessor.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressibleColumnAccessor.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar.compression
+package org.apache.spark.sql.execution.columnar.compression
 
 import org.apache.spark.sql.catalyst.expressions.MutableRow
-import org.apache.spark.sql.columnar.{ColumnAccessor, NativeColumnAccessor}
+import org.apache.spark.sql.execution.columnar.{ColumnAccessor, NativeColumnAccessor}
 import org.apache.spark.sql.types.AtomicType
 
-private[sql] trait CompressibleColumnAccessor[T <: AtomicType] extends ColumnAccessor {
+private[columnar] trait CompressibleColumnAccessor[T <: AtomicType] extends ColumnAccessor {
   this: NativeColumnAccessor[T] =>
 
   private var decoder: Decoder[T] = _
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/CompressibleColumnBuilder.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressibleColumnBuilder.scala
similarity index 94%
rename from sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/CompressibleColumnBuilder.scala
rename to sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressibleColumnBuilder.scala
index 161021ff961548d14fce399be784f8ed10dacb2c..b0e216feb55955c248f649a739b4893ef7cf00e2 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/CompressibleColumnBuilder.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressibleColumnBuilder.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar.compression
+package org.apache.spark.sql.execution.columnar.compression
 
 import java.nio.{ByteBuffer, ByteOrder}
 
 import org.apache.spark.Logging
 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.columnar.{ColumnBuilder, NativeColumnBuilder}
+import org.apache.spark.sql.execution.columnar.{ColumnBuilder, NativeColumnBuilder}
 import org.apache.spark.sql.types.AtomicType
 
 /**
@@ -40,7 +40,7 @@ import org.apache.spark.sql.types.AtomicType
  *     header         body
  * }}}
  */
-private[sql] trait CompressibleColumnBuilder[T <: AtomicType]
+private[columnar] trait CompressibleColumnBuilder[T <: AtomicType]
   extends ColumnBuilder with Logging {
 
   this: NativeColumnBuilder[T] with WithCompressionSchemes =>
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/CompressionScheme.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressionScheme.scala
similarity index 83%
rename from sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/CompressionScheme.scala
rename to sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressionScheme.scala
index 9322b772fd898ba4846200786b3042a892417bb1..920381f9c63d075f2dcdb28437666542054fcbc5 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/CompressionScheme.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/CompressionScheme.scala
@@ -15,15 +15,15 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar.compression
+package org.apache.spark.sql.execution.columnar.compression
 
 import java.nio.{ByteBuffer, ByteOrder}
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.MutableRow
-import org.apache.spark.sql.columnar.{ColumnType, NativeColumnType}
+import org.apache.spark.sql.execution.columnar.{ColumnType, NativeColumnType}
 import org.apache.spark.sql.types.AtomicType
 
-private[sql] trait Encoder[T <: AtomicType] {
+private[columnar] trait Encoder[T <: AtomicType] {
   def gatherCompressibilityStats(row: InternalRow, ordinal: Int): Unit = {}
 
   def compressedSize: Int
@@ -37,13 +37,13 @@ private[sql] trait Encoder[T <: AtomicType] {
   def compress(from: ByteBuffer, to: ByteBuffer): ByteBuffer
 }
 
-private[sql] trait Decoder[T <: AtomicType] {
+private[columnar] trait Decoder[T <: AtomicType] {
   def next(row: MutableRow, ordinal: Int): Unit
 
   def hasNext: Boolean
 }
 
-private[sql] trait CompressionScheme {
+private[columnar] trait CompressionScheme {
   def typeId: Int
 
   def supports(columnType: ColumnType[_]): Boolean
@@ -53,15 +53,15 @@ private[sql] trait CompressionScheme {
   def decoder[T <: AtomicType](buffer: ByteBuffer, columnType: NativeColumnType[T]): Decoder[T]
 }
 
-private[sql] trait WithCompressionSchemes {
+private[columnar] trait WithCompressionSchemes {
   def schemes: Seq[CompressionScheme]
 }
 
-private[sql] trait AllCompressionSchemes extends WithCompressionSchemes {
+private[columnar] trait AllCompressionSchemes extends WithCompressionSchemes {
   override val schemes: Seq[CompressionScheme] = CompressionScheme.all
 }
 
-private[sql] object CompressionScheme {
+private[columnar] object CompressionScheme {
   val all: Seq[CompressionScheme] =
     Seq(PassThrough, RunLengthEncoding, DictionaryEncoding, BooleanBitSet, IntDelta, LongDelta)
 
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/compressionSchemes.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/compressionSchemes.scala
similarity index 96%
rename from sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/compressionSchemes.scala
rename to sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/compressionSchemes.scala
index 41c9a284e3e4ae9250d2be3bf40eccc2cf34dd8e..941f03b745a075aced0bbdab2baf6f4d5290d115 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/columnar/compression/compressionSchemes.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/compression/compressionSchemes.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar.compression
+package org.apache.spark.sql.execution.columnar.compression
 
 import java.nio.ByteBuffer
 
@@ -23,11 +23,11 @@ import scala.collection.mutable
 
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.{MutableRow, SpecificMutableRow}
-import org.apache.spark.sql.columnar._
+import org.apache.spark.sql.execution.columnar._
 import org.apache.spark.sql.types._
 
 
-private[sql] case object PassThrough extends CompressionScheme {
+private[columnar] case object PassThrough extends CompressionScheme {
   override val typeId = 0
 
   override def supports(columnType: ColumnType[_]): Boolean = true
@@ -64,7 +64,7 @@ private[sql] case object PassThrough extends CompressionScheme {
   }
 }
 
-private[sql] case object RunLengthEncoding extends CompressionScheme {
+private[columnar] case object RunLengthEncoding extends CompressionScheme {
   override val typeId = 1
 
   override def encoder[T <: AtomicType](columnType: NativeColumnType[T]): Encoder[T] = {
@@ -172,7 +172,7 @@ private[sql] case object RunLengthEncoding extends CompressionScheme {
   }
 }
 
-private[sql] case object DictionaryEncoding extends CompressionScheme {
+private[columnar] case object DictionaryEncoding extends CompressionScheme {
   override val typeId = 2
 
   // 32K unique values allowed
@@ -281,7 +281,7 @@ private[sql] case object DictionaryEncoding extends CompressionScheme {
   }
 }
 
-private[sql] case object BooleanBitSet extends CompressionScheme {
+private[columnar] case object BooleanBitSet extends CompressionScheme {
   override val typeId = 3
 
   val BITS_PER_LONG = 64
@@ -371,7 +371,7 @@ private[sql] case object BooleanBitSet extends CompressionScheme {
   }
 }
 
-private[sql] case object IntDelta extends CompressionScheme {
+private[columnar] case object IntDelta extends CompressionScheme {
   override def typeId: Int = 4
 
   override def decoder[T <: AtomicType](buffer: ByteBuffer, columnType: NativeColumnType[T])
@@ -451,7 +451,7 @@ private[sql] case object IntDelta extends CompressionScheme {
   }
 }
 
-private[sql] case object LongDelta extends CompressionScheme {
+private[columnar] case object LongDelta extends CompressionScheme {
   override def typeId: Int = 5
 
   override def decoder[T <: AtomicType](buffer: ByteBuffer, columnType: NativeColumnType[T])
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/package.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/package.scala
index 28fa231e722d04a22fa36a4ac7c65e755035298d..c912734bba9e349902d6b51452d41a403947bc14 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/package.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/package.scala
@@ -19,5 +19,7 @@ package org.apache.spark.sql
 
 /**
  * The physical execution component of Spark SQL. Note that this is a private package.
+ * All classes in this package are considered an internal API to Spark SQL and are
+ * subject to change between minor releases.
  */
 package object execution
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
index bce94dafad755366c2cb6f8668b6b4a5c40f80a6..d86df4cfb9b4d3c6f9726a24ad820868a7e27afd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
@@ -27,7 +27,7 @@ import scala.language.postfixOps
 import org.scalatest.concurrent.Eventually._
 
 import org.apache.spark.Accumulators
-import org.apache.spark.sql.columnar._
+import org.apache.spark.sql.execution.columnar._
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.test.{SQLTestUtils, SharedSQLContext}
 import org.apache.spark.storage.{StorageLevel, RDDBlockId}
@@ -280,7 +280,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
     sql("CACHE TABLE testData")
     sqlContext.table("testData").queryExecution.withCachedData.collect {
       case cached: InMemoryRelation =>
-        val actualSizeInBytes = (1 to 100).map(i => INT.defaultSize + i.toString.length + 4).sum
+        val actualSizeInBytes = (1 to 100).map(i => 4 + i.toString.length + 4).sum
         assert(cached.statistics.sizeInBytes === actualSizeInBytes)
     }
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala b/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
index b5417b195f3966501af888f5d5768e9346f391e4..6ea1fe4ccfd898e702abf5a06eff5e07ef24f2b8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
@@ -23,7 +23,7 @@ import scala.collection.JavaConverters._
 
 import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.util._
-import org.apache.spark.sql.columnar.InMemoryRelation
+import org.apache.spark.sql.execution.columnar.InMemoryRelation
 
 abstract class QueryTest extends PlanTest {
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnStatsSuite.scala
similarity index 96%
rename from sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala
rename to sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnStatsSuite.scala
index 89a664001bdd201948f48072939ee8a9d6baf248..b2d04f7c5a6e36ee93b6766a02771dba745a72b9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnStatsSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
@@ -50,7 +50,7 @@ class ColumnStatsSuite extends SparkFunSuite {
     }
 
     test(s"$columnStatsName: non-empty") {
-      import org.apache.spark.sql.columnar.ColumnarTestUtils._
+      import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
 
       val columnStats = columnStatsClass.newInstance()
       val rows = Seq.fill(10)(makeRandomRow(columnType)) ++ Seq.fill(10)(makeNullRow(1))
@@ -86,7 +86,7 @@ class ColumnStatsSuite extends SparkFunSuite {
     }
 
     test(s"$columnStatsName: non-empty") {
-      import org.apache.spark.sql.columnar.ColumnarTestUtils._
+      import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
 
       val columnStats = new DecimalColumnStats(15, 10)
       val rows = Seq.fill(10)(makeRandomRow(columnType)) ++ Seq.fill(10)(makeNullRow(1))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnTypeSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnTypeSuite.scala
similarity index 97%
rename from sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnTypeSuite.scala
rename to sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnTypeSuite.scala
index 63bc39bfa0307fad3c0ade6878fe0199a208401d..34dd96929e6c149b754acabefbf3e81bd396c24c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnTypeSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnTypeSuite.scala
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
 
 import java.nio.{ByteOrder, ByteBuffer}
 
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.catalyst.CatalystTypeConverters
 import org.apache.spark.sql.catalyst.expressions.{UnsafeProjection, GenericMutableRow}
-import org.apache.spark.sql.columnar.ColumnarTestUtils._
+import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
 import org.apache.spark.sql.types._
 import org.apache.spark.{Logging, SparkFunSuite}
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnarTestUtils.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnarTestUtils.scala
similarity index 98%
rename from sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnarTestUtils.scala
rename to sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnarTestUtils.scala
index a5882f7870e3719790275353c7cf134383db6efd..9cae65ef6f5dcd047b559912cd4ed7a698458cbf 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnarTestUtils.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/ColumnarTestUtils.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
 
 import scala.collection.immutable.HashSet
 import scala.util.Random
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala
similarity index 99%
rename from sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala
rename to sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala
index 6265e40a0a07bed234e5c07d0c774cd919ee5de1..25afed25c897b28184f426e5a0c5848b0d7ca8f0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
 
 import java.sql.{Date, Timestamp}
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnAccessorSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/NullableColumnAccessorSuite.scala
similarity index 96%
rename from sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnAccessorSuite.scala
rename to sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/NullableColumnAccessorSuite.scala
index aa1605fee8c73e2b6ee5ab70ecf7efcb5c0153c1..35dc9a276cef7b58b0589f826215ad9bad269ea3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnAccessorSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/NullableColumnAccessorSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
 
 import java.nio.ByteBuffer
 
@@ -38,7 +38,7 @@ object TestNullableColumnAccessor {
 }
 
 class NullableColumnAccessorSuite extends SparkFunSuite {
-  import org.apache.spark.sql.columnar.ColumnarTestUtils._
+  import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
 
   Seq(
     NULL, BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE,
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnBuilderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/NullableColumnBuilderSuite.scala
similarity index 96%
rename from sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnBuilderSuite.scala
rename to sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/NullableColumnBuilderSuite.scala
index 91404577832a044a6a7623924929faab6bb97dc9..93be3e16a5ed90758dc982c46c3d363b847bffc9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnBuilderSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/NullableColumnBuilderSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.catalyst.CatalystTypeConverters
@@ -36,7 +36,7 @@ object TestNullableColumnBuilder {
 }
 
 class NullableColumnBuilderSuite extends SparkFunSuite {
-  import org.apache.spark.sql.columnar.ColumnarTestUtils._
+  import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
 
   Seq(
     BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE,
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/PartitionBatchPruningSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/PartitionBatchPruningSuite.scala
similarity index 99%
rename from sql/core/src/test/scala/org/apache/spark/sql/columnar/PartitionBatchPruningSuite.scala
rename to sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/PartitionBatchPruningSuite.scala
index 6b7401464f46f075ebd7c508bc2595d4dda315eb..d762f7bfe914cc55e5b54014f7f879de4031424d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/PartitionBatchPruningSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/PartitionBatchPruningSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar
+package org.apache.spark.sql.execution.columnar
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/BooleanBitSetSuite.scala
similarity index 94%
rename from sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala
rename to sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/BooleanBitSetSuite.scala
index 9a2948c59ba420d6ef1bca18240b66d7e26f45ca..ccbddef0fad3aba1359a9546df85fbf985fc81c4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/BooleanBitSetSuite.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar.compression
+package org.apache.spark.sql.execution.columnar.compression
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.GenericMutableRow
-import org.apache.spark.sql.columnar.ColumnarTestUtils._
-import org.apache.spark.sql.columnar.{BOOLEAN, NoopColumnStats}
+import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
+import org.apache.spark.sql.execution.columnar.{BOOLEAN, NoopColumnStats}
 
 class BooleanBitSetSuite extends SparkFunSuite {
   import BooleanBitSet._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/DictionaryEncodingSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/DictionaryEncodingSuite.scala
similarity index 96%
rename from sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/DictionaryEncodingSuite.scala
rename to sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/DictionaryEncodingSuite.scala
index acfab6586c0d10d0b1e2634976995ed1264bfae1..830ca0294e1b86681f5989e5604093a2686db21f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/DictionaryEncodingSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/DictionaryEncodingSuite.scala
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar.compression
+package org.apache.spark.sql.execution.columnar.compression
 
 import java.nio.ByteBuffer
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.catalyst.expressions.GenericMutableRow
-import org.apache.spark.sql.columnar._
-import org.apache.spark.sql.columnar.ColumnarTestUtils._
+import org.apache.spark.sql.execution.columnar._
+import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
 import org.apache.spark.sql.types.AtomicType
 
 class DictionaryEncodingSuite extends SparkFunSuite {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/IntegralDeltaSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/IntegralDeltaSuite.scala
similarity index 96%
rename from sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/IntegralDeltaSuite.scala
rename to sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/IntegralDeltaSuite.scala
index 2111e9fbe62cb80c7cb6eac907369111a1c3f10d..988a577a7b4d04b0cccff3b6a482440066c86038 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/IntegralDeltaSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/IntegralDeltaSuite.scala
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar.compression
+package org.apache.spark.sql.execution.columnar.compression
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.catalyst.expressions.GenericMutableRow
-import org.apache.spark.sql.columnar._
-import org.apache.spark.sql.columnar.ColumnarTestUtils._
+import org.apache.spark.sql.execution.columnar._
+import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
 import org.apache.spark.sql.types.IntegralType
 
 class IntegralDeltaSuite extends SparkFunSuite {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/RunLengthEncodingSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/RunLengthEncodingSuite.scala
similarity index 95%
rename from sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/RunLengthEncodingSuite.scala
rename to sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/RunLengthEncodingSuite.scala
index 67ec08f594a43be80ca3786ba8faa5a640b603e5..ce3affba55c7197525015454d141390b4bccf974 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/RunLengthEncodingSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/RunLengthEncodingSuite.scala
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar.compression
+package org.apache.spark.sql.execution.columnar.compression
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.catalyst.expressions.GenericMutableRow
-import org.apache.spark.sql.columnar._
-import org.apache.spark.sql.columnar.ColumnarTestUtils._
+import org.apache.spark.sql.execution.columnar._
+import org.apache.spark.sql.execution.columnar.ColumnarTestUtils._
 import org.apache.spark.sql.types.AtomicType
 
 class RunLengthEncodingSuite extends SparkFunSuite {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/TestCompressibleColumnBuilder.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/TestCompressibleColumnBuilder.scala
similarity index 93%
rename from sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/TestCompressibleColumnBuilder.scala
rename to sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/TestCompressibleColumnBuilder.scala
index 5268dfe0aa03ef8d22fde1c9351f3d7034516112..5e078f251375a30062798e727ea2a9601e7748fc 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/TestCompressibleColumnBuilder.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/TestCompressibleColumnBuilder.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.columnar.compression
+package org.apache.spark.sql.execution.columnar.compression
 
-import org.apache.spark.sql.columnar._
+import org.apache.spark.sql.execution.columnar._
 import org.apache.spark.sql.types.AtomicType
 
 class TestCompressibleColumnBuilder[T <: AtomicType](
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
index 5c2fc7d82ffbdf99d2c35ba3c91ac75376465643..99478e82d419fc1a0775685fea2c50f1aa0eb68e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.hive
 
 import java.io.File
 
-import org.apache.spark.sql.columnar.InMemoryColumnarTableScan
+import org.apache.spark.sql.execution.columnar.InMemoryColumnarTableScan
 import org.apache.spark.sql.execution.datasources.parquet.ParquetRelation
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 import org.apache.spark.sql.{AnalysisException, QueryTest, SaveMode}