diff --git a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
index 4f9537d1c70fa9ff24769a8443ee312819e94503..bde3d1f592f0b0cd5dbd3dddb532a9439ff46253 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
@@ -34,7 +34,8 @@ import org.apache.spark.util.Utils
  *
  * @param rootDirs The directories to use for storing block files. Data will be hashed among these.
  */
-private[spark] class DiskBlockManager(shuffleManager: ShuffleBlockManager, rootDirs: String) extends PathResolver with Logging {
+private[spark] class DiskBlockManager(shuffleManager: ShuffleBlockManager, rootDirs: String)
+  extends PathResolver with Logging {
 
   private val MAX_DIR_CREATION_ATTEMPTS: Int = 10
   private val subDirsPerLocalDir = System.getProperty("spark.diskStore.subDirectories", "64").toInt
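For context on the "hashed among these" note in the scaladoc above: DiskBlockManager spreads block files across the configured root directories (and sub-directories, per spark.diskStore.subDirectories) by hashing the block's file name. A minimal sketch of one such scheme; the helper name and exact hash arithmetic here are illustrative, not the verbatim Spark internals:

    // Sketch: deterministically spread block files across root dirs and sub-dirs.
    // subDirsPerLocalDir mirrors the "spark.diskStore.subDirectories" setting above.
    def pickDirectory(filename: String, rootDirs: Array[String], subDirsPerLocalDir: Int): String = {
      val hash = filename.hashCode & Int.MaxValue                   // non-negative hash of the name
      val dirId = hash % rootDirs.length                            // choose a root directory
      val subDirId = (hash / rootDirs.length) % subDirsPerLocalDir  // then a sub-directory within it
      "%s/%02x".format(rootDirs(dirId), subDirId)
    }

Because the mapping is a pure function of the file name, any component can recompute a block's location without shared state.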
diff --git a/core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala b/core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala
index c61febf830e94922545a2c6fee71e558f3c1fd8d..d718c87cabfc27ee018fbece08d5f8f36d7e725b 100644
--- a/core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala
@@ -27,7 +27,8 @@ import scala.collection.mutable
 
 import org.apache.spark.Logging
 import org.apache.spark.serializer.Serializer
-import org.apache.spark.util.{MetadataCleanerType, MetadataCleaner, AGodDamnPrimitiveVector, TimeStampedHashMap}
+import org.apache.spark.util.{MetadataCleanerType, MetadataCleaner, TimeStampedHashMap}
+import org.apache.spark.util.collection.PrimitiveVector
 
 private[spark]
 class ShuffleWriterGroup(
@@ -203,7 +204,7 @@ class ShuffleBlockManager(blockManager: BlockManager) extends Logging {
  */
 private[spark]
 class ShuffleFileGroup(val shuffleId: Int, val fileId: Int, val files: Array[ShuffleFile]) {
-  private val mapIds = new AGodDamnPrimitiveVector[Int]()
+  private val mapIds = new PrimitiveVector[Int]()
 
   files.foreach(_.setShuffleFileGroup(this))
 
@@ -238,7 +239,7 @@ class ShuffleFile(val file: File) {
    * Consecutive offsets of blocks into the file, ordered by position in the file.
    * This ordering allows us to compute block lengths by examining the following block offset.
    */
-  val blockOffsets = new AGodDamnPrimitiveVector[Long]()
+  val blockOffsets = new PrimitiveVector[Long]()
 
   /** Back pointer to whichever ShuffleFileGroup this file is a part of. */
   private var shuffleFileGroup : ShuffleFileGroup = _
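The comment on blockOffsets describes a compact scheme: only start offsets are stored, and a block's length falls out of the next block's offset (or the end of the file for the last block). A minimal sketch of that computation, using a plain Array in place of PrimitiveVector; fileLength and the function name are illustrative:

    // Sketch: recover (offset, length) for block i from consecutive start offsets.
    def blockSegment(blockOffsets: Array[Long], fileLength: Long, i: Int): (Long, Long) = {
      val offset = blockOffsets(i)
      val length =
        if (i == blockOffsets.length - 1) fileLength - offset  // last block runs to end of file
        else blockOffsets(i + 1) - offset                      // otherwise up to the next block
      (offset, length)
    }

Storing one Long per block instead of an (offset, length) pair halves the metadata kept per shuffle file.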
diff --git a/core/src/main/scala/org/apache/spark/util/PrimitiveVector.scala b/core/src/main/scala/org/apache/spark/util/collection/PrimitiveVector.scala
similarity index 90%
rename from core/src/main/scala/org/apache/spark/util/PrimitiveVector.scala
rename to core/src/main/scala/org/apache/spark/util/collection/PrimitiveVector.scala
index d316601b905b9cff52ca7af437d19c0b01ada80a..721f12b71156463590c12e3ea4c360cfc1f515b3 100644
--- a/core/src/main/scala/org/apache/spark/util/PrimitiveVector.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/PrimitiveVector.scala
@@ -15,12 +15,11 @@
  * limitations under the License.
  */
 
-package org.apache.spark.util
+package org.apache.spark.util.collection
 
 /** Provides a simple, non-threadsafe, array-backed vector that can store primitives. */
-class AGodDamnPrimitiveVector[@specialized(Long, Int, Double) V: ClassManifest]
-    (initialSize: Int = 64)
-{
+private[spark]
+class PrimitiveVector[@specialized(Long, Int, Double) V: ClassManifest](initialSize: Int = 64) {
   private var numElements = 0
   private var array = new Array[V](initialSize)
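The hunk ends just after the two backing fields, but the scaladoc makes the design clear: an append-only vector over a raw @specialized array, so Long/Int/Double elements are stored unboxed. A sketch of the core members under those assumptions (the real method bodies in PrimitiveVector may differ), written as they would appear inside the class:

    // Sketch: grow-by-doubling append for an array-backed vector.
    def +=(value: V) {
      if (numElements == array.length) {
        val newArray = new Array[V](array.length * 2)  // double capacity when full
        Array.copy(array, 0, newArray, 0, numElements)
        array = newArray
      }
      array(numElements) = value
      numElements += 1
    }

    def apply(index: Int): V = array(index)  // reads delegate straight to the backing array
    def length: Int = numElements

Usage is the obvious one, e.g. val v = new PrimitiveVector[Long](); v += 42L; v(0). Doubling keeps appends amortized O(1), which is why the callers above (mapIds, blockOffsets) can append per map task without per-element allocation.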