diff --git a/core/src/main/java/org/apache/spark/network/netty/FileClient.java b/core/src/main/java/org/apache/spark/network/netty/FileClient.java
index d2d778b7567bcd925d299c6273cd58be4ddaf63f..0d31894d6ec7a0717f3ec1a990693f355c66c410 100644
--- a/core/src/main/java/org/apache/spark/network/netty/FileClient.java
+++ b/core/src/main/java/org/apache/spark/network/netty/FileClient.java
@@ -17,6 +17,8 @@
 
 package org.apache.spark.network.netty;
 
+import java.util.concurrent.TimeUnit;
+
 import io.netty.bootstrap.Bootstrap;
 import io.netty.channel.Channel;
 import io.netty.channel.ChannelOption;
@@ -27,8 +29,6 @@ import io.netty.channel.socket.oio.OioSocketChannel;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.concurrent.TimeUnit;
-
 class FileClient {
 
   private static final Logger LOG = LoggerFactory.getLogger(FileClient.class.getName());
diff --git a/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java b/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java
index 3ac045f9444f281c041f3f3b2a999370490782ac..c0133e19c7f79665ed15bf12527769c3b8527f91 100644
--- a/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java
+++ b/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java
@@ -23,11 +23,11 @@ import java.io.FileInputStream;
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.channel.SimpleChannelInboundHandler;
 import io.netty.channel.DefaultFileRegion;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.spark.storage.BlockId;
 import org.apache.spark.storage.FileSegment;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 class FileServerHandler extends SimpleChannelInboundHandler<String> {
 
diff --git a/core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala b/core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala
index 32429f01acab81e9d4dcdb9fd7289240d9ef44b0..1fca5729c60928e34f2cd52c6d1f0030d0705ef2 100644
--- a/core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala
+++ b/core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala
@@ -17,7 +17,8 @@
 
 package org.apache.hadoop.mapreduce
 
-import java.lang.{Integer => JInteger, Boolean => JBoolean}
+import java.lang.{Boolean => JBoolean, Integer => JInteger}
+
 import org.apache.hadoop.conf.Configuration
 
 private[apache]
diff --git a/core/src/main/scala/org/apache/spark/Accumulators.scala b/core/src/main/scala/org/apache/spark/Accumulators.scala
index df01b2e9421807e22912b56f48dc72a7ea47722f..73dd471ab1d52b718b7ac5addf90b3e4c4294374 100644
--- a/core/src/main/scala/org/apache/spark/Accumulators.scala
+++ b/core/src/main/scala/org/apache/spark/Accumulators.scala
@@ -19,8 +19,9 @@ package org.apache.spark
 
 import java.io.{ObjectInputStream, Serializable}
 
-import scala.collection.mutable.Map
 import scala.collection.generic.Growable
+import scala.collection.mutable.Map
+
 import org.apache.spark.serializer.JavaSerializer
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala b/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala
index d9ed572da6dba07f37695b1f7c9406aed1f1b5d0..754b46a4c7df294a1fde6176feec7f2f0ca90c15 100644
--- a/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala
+++ b/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala
@@ -20,12 +20,11 @@ package org.apache.spark
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.HashMap
 
-import org.apache.spark.executor.{ShuffleReadMetrics, TaskMetrics}
+import org.apache.spark.executor.ShuffleReadMetrics
 import org.apache.spark.serializer.Serializer
 import org.apache.spark.storage.{BlockId, BlockManagerId, ShuffleBlockId}
 import org.apache.spark.util.CompletionIterator
 
-
 private[spark] class BlockStoreShuffleFetcher extends ShuffleFetcher with Logging {
 
   override def fetch[T](
diff --git a/core/src/main/scala/org/apache/spark/CacheManager.scala b/core/src/main/scala/org/apache/spark/CacheManager.scala
index b38af2497d3d95c1a220e5d36034d0242b42301d..1daabecf232927e44a6ef8ab5cda510ea7ca7d57 100644
--- a/core/src/main/scala/org/apache/spark/CacheManager.scala
+++ b/core/src/main/scala/org/apache/spark/CacheManager.scala
@@ -18,9 +18,9 @@
 package org.apache.spark
 
 import scala.collection.mutable.{ArrayBuffer, HashSet}
-import org.apache.spark.storage.{BlockId, BlockManager, StorageLevel, RDDBlockId}
-import org.apache.spark.rdd.RDD
 
+import org.apache.spark.rdd.RDD
+import org.apache.spark.storage.{BlockManager, RDDBlockId, StorageLevel}
 
 /** Spark class responsible for passing RDDs split contents to the BlockManager and making
     sure a node doesn't load two copies of an RDD at once.
diff --git a/core/src/main/scala/org/apache/spark/FutureAction.scala b/core/src/main/scala/org/apache/spark/FutureAction.scala
index d7d10285dadcb49971f6a6404eeea6ac423d35f6..f2decd14ef6d9b19cb672d4d95b931c5d7bbacf1 100644
--- a/core/src/main/scala/org/apache/spark/FutureAction.scala
+++ b/core/src/main/scala/org/apache/spark/FutureAction.scala
@@ -21,10 +21,8 @@ import scala.concurrent._
 import scala.concurrent.duration.Duration
 import scala.util.Try
 
-import org.apache.spark.scheduler.{JobSucceeded, JobWaiter}
-import org.apache.spark.scheduler.JobFailed
 import org.apache.spark.rdd.RDD
-
+import org.apache.spark.scheduler.{JobFailed, JobSucceeded, JobWaiter}
 
 /**
  * A future for the result of an action to support cancellation. This is an extension of the
diff --git a/core/src/main/scala/org/apache/spark/HttpFileServer.scala b/core/src/main/scala/org/apache/spark/HttpFileServer.scala
index a885898ad48d45e6e8d49ac099f9938cd71630c9..d3264a4bb3c810ddf68e37dfe82a08d92cd6dc50 100644
--- a/core/src/main/scala/org/apache/spark/HttpFileServer.scala
+++ b/core/src/main/scala/org/apache/spark/HttpFileServer.scala
@@ -17,8 +17,10 @@
 
 package org.apache.spark
 
-import java.io.{File}
+import java.io.File
+
 import com.google.common.io.Files
+
 import org.apache.spark.util.Utils
 
 private[spark] class HttpFileServer extends Logging {
diff --git a/core/src/main/scala/org/apache/spark/HttpServer.scala b/core/src/main/scala/org/apache/spark/HttpServer.scala
index 69a738dc4446ac23965a8cbd3c7823f014a1fba2..759e68ee0cc6136a369d16c0f8b593354a89abc9 100644
--- a/core/src/main/scala/org/apache/spark/HttpServer.scala
+++ b/core/src/main/scala/org/apache/spark/HttpServer.scala
@@ -18,7 +18,6 @@
 package org.apache.spark
 
 import java.io.File
-import java.net.InetAddress
 
 import org.eclipse.jetty.server.Server
 import org.eclipse.jetty.server.bio.SocketConnector
@@ -26,6 +25,7 @@ import org.eclipse.jetty.server.handler.DefaultHandler
 import org.eclipse.jetty.server.handler.HandlerList
 import org.eclipse.jetty.server.handler.ResourceHandler
 import org.eclipse.jetty.util.thread.QueuedThreadPool
+
 import org.apache.spark.util.Utils
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/MapOutputTracker.scala b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
index 8d6db0fca23f7e811a8b42bc6d327c3402331c8f..5968973132942e9f73a3d0d6393aa0818ddceeb6 100644
--- a/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
+++ b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
@@ -22,7 +22,6 @@ import java.util.zip.{GZIPInputStream, GZIPOutputStream}
 
 import scala.collection.mutable.HashSet
 import scala.concurrent.Await
-import scala.concurrent.duration._
 
 import akka.actor._
 import akka.pattern.ask
diff --git a/core/src/main/scala/org/apache/spark/SerializableWritable.scala b/core/src/main/scala/org/apache/spark/SerializableWritable.scala
index fdd4c24e2345f7bff20a1e565d9d50ee60c47e0f..dff665cae6cb644acd26ffd4742be20780c6fa01 100644
--- a/core/src/main/scala/org/apache/spark/SerializableWritable.scala
+++ b/core/src/main/scala/org/apache/spark/SerializableWritable.scala
@@ -19,9 +19,9 @@ package org.apache.spark
 
 import java.io._
 
+import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.io.ObjectWritable
 import org.apache.hadoop.io.Writable
-import org.apache.hadoop.conf.Configuration
 
 class SerializableWritable[T <: Writable](@transient var t: T) extends Serializable {
   def value = t
diff --git a/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala b/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala
index a85aa50a9b94bb9fcdbc2b27868cfde20e9c6733..e8f756c4088899ca8c1bae582f9facf8840d5c99 100644
--- a/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala
+++ b/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala
@@ -17,10 +17,8 @@
 
 package org.apache.spark
 
-import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.serializer.Serializer
 
-
 private[spark] abstract class ShuffleFetcher {
 
   /**
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 45d19bcbfa6f2e3f891e7802c3279ba37757236b..b947feb891ee63157a1c36428721efa66b231b0d 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -20,8 +20,6 @@ package org.apache.spark
 import scala.collection.JavaConverters._
 import scala.collection.mutable.HashMap
 
-import java.io.{ObjectInputStream, ObjectOutputStream, IOException}
-
 /**
  * Configuration for a Spark application. Used to set various Spark parameters as key-value pairs.
  *
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 5a6d06b66e85bc2a8881cf186ee84493c7b69ab2..a24f07e9a6e9a9fd409fe122c6bdd6c603eecc0a 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -19,21 +19,18 @@ package org.apache.spark
 
 import java.io._
 import java.net.URI
-import java.util.{UUID, Properties}
+import java.util.{Properties, UUID}
 import java.util.concurrent.atomic.AtomicInteger
 
 import scala.collection.{Map, Set}
 import scala.collection.generic.Growable
-
 import scala.collection.mutable.{ArrayBuffer, HashMap}
 import scala.reflect.{ClassTag, classTag}
 
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
-import org.apache.hadoop.io.{ArrayWritable, BooleanWritable, BytesWritable, DoubleWritable,
-  FloatWritable, IntWritable, LongWritable, NullWritable, Text, Writable}
-import org.apache.hadoop.mapred.{FileInputFormat, InputFormat, JobConf, SequenceFileInputFormat,
-  TextInputFormat}
+import org.apache.hadoop.io.{ArrayWritable, BooleanWritable, BytesWritable, DoubleWritable, FloatWritable, IntWritable, LongWritable, NullWritable, Text, Writable}
+import org.apache.hadoop.mapred.{FileInputFormat, InputFormat, JobConf, SequenceFileInputFormat, TextInputFormat}
 import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat, Job => NewHadoopJob}
 import org.apache.hadoop.mapreduce.lib.input.{FileInputFormat => NewFileInputFormat}
 import org.apache.mesos.MesosNativeLibrary
@@ -42,14 +39,12 @@ import org.apache.spark.deploy.{LocalSparkCluster, SparkHadoopUtil}
 import org.apache.spark.partial.{ApproximateEvaluator, PartialResult}
 import org.apache.spark.rdd._
 import org.apache.spark.scheduler._
-import org.apache.spark.scheduler.cluster.{CoarseGrainedSchedulerBackend,
-  SparkDeploySchedulerBackend, SimrSchedulerBackend}
+import org.apache.spark.scheduler.cluster.{CoarseGrainedSchedulerBackend, SparkDeploySchedulerBackend, SimrSchedulerBackend}
 import org.apache.spark.scheduler.cluster.mesos.{CoarseMesosSchedulerBackend, MesosSchedulerBackend}
 import org.apache.spark.scheduler.local.LocalBackend
 import org.apache.spark.storage.{BlockManagerSource, RDDInfo, StorageStatus, StorageUtils}
 import org.apache.spark.ui.SparkUI
-import org.apache.spark.util.{Utils, TimeStampedHashMap, MetadataCleaner, MetadataCleanerType,
-  ClosureCleaner}
+import org.apache.spark.util.{ClosureCleaner, MetadataCleaner, MetadataCleanerType, TimeStampedHashMap, Utils}
 
 /**
  * Main entry point for Spark functionality. A SparkContext represents the connection to a Spark
diff --git a/core/src/main/scala/org/apache/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala
index 6ae020f6a21b1da3f21881e18f30e27cd808b7d0..7ac65828f670f34e34b0f75b334d9d8eaeca9ad0 100644
--- a/core/src/main/scala/org/apache/spark/SparkEnv.scala
+++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -21,16 +21,15 @@ import scala.collection.mutable
 import scala.concurrent.Await
 
 import akka.actor._
+import com.google.common.collect.MapMaker
 
+import org.apache.spark.api.python.PythonWorkerFactory
 import org.apache.spark.broadcast.BroadcastManager
 import org.apache.spark.metrics.MetricsSystem
-import org.apache.spark.storage.{BlockManagerMasterActor, BlockManager, BlockManagerMaster}
+import org.apache.spark.storage.{BlockManager, BlockManagerMaster, BlockManagerMasterActor}
 import org.apache.spark.network.ConnectionManager
 import org.apache.spark.serializer.{Serializer, SerializerManager}
-import org.apache.spark.util.{Utils, AkkaUtils}
-import org.apache.spark.api.python.PythonWorkerFactory
-
-import com.google.common.collect.MapMaker
+import org.apache.spark.util.{AkkaUtils, Utils}
 
 /**
  * Holds all the runtime environment objects for a running Spark instance (either master or worker),
diff --git a/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala b/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
index 4e63117a5133461991efac6136949c85b31fee20..d404459a8eb7ebff6f80be3874d577caedc15c64 100644
--- a/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
+++ b/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
@@ -18,8 +18,8 @@
 package org.apache.hadoop.mapred
 
 import java.io.IOException
-import java.text.SimpleDateFormat
 import java.text.NumberFormat
+import java.text.SimpleDateFormat
 import java.util.Date
 
 import org.apache.hadoop.fs.FileSystem
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
index 33737e1960aded65caf9494a8eda115e376f1ec1..071044463d9809b0fc7cbe9b0a03dc3560e2bae1 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
@@ -17,27 +17,25 @@
 
 package org.apache.spark.api.java
 
+import java.lang.{Double => JDouble}
+
 import scala.reflect.ClassTag
 
-import org.apache.spark.rdd.RDD
+import org.apache.spark.Partitioner
 import org.apache.spark.SparkContext.doubleRDDToDoubleRDDFunctions
 import org.apache.spark.api.java.function.{Function => JFunction}
-import org.apache.spark.util.StatCounter
 import org.apache.spark.partial.{BoundedDouble, PartialResult}
+import org.apache.spark.rdd.RDD
 import org.apache.spark.storage.StorageLevel
+import org.apache.spark.util.StatCounter
 
-import java.lang.Double
-import org.apache.spark.Partitioner
-
-import scala.collection.JavaConverters._
-
-class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, JavaDoubleRDD] {
+class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[JDouble, JavaDoubleRDD] {
 
-  override val classTag: ClassTag[Double] = implicitly[ClassTag[Double]]
+  override val classTag: ClassTag[JDouble] = implicitly[ClassTag[JDouble]]
 
-  override val rdd: RDD[Double] = srdd.map(x => Double.valueOf(x))
+  override val rdd: RDD[JDouble] = srdd.map(x => JDouble.valueOf(x))
 
-  override def wrapRDD(rdd: RDD[Double]): JavaDoubleRDD =
+  override def wrapRDD(rdd: RDD[JDouble]): JavaDoubleRDD =
     new JavaDoubleRDD(rdd.map(_.doubleValue))
 
   // Common RDD functions
@@ -67,7 +65,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
   def unpersist(blocking: Boolean): JavaDoubleRDD = fromRDD(srdd.unpersist(blocking))
 
   // first() has to be overriden here in order for its return type to be Double instead of Object.
-  override def first(): Double = srdd.first()
+  override def first(): JDouble = srdd.first()
 
   // Transformations (return a new RDD)
 
@@ -84,7 +82,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
   /**
    * Return a new RDD containing only the elements that satisfy a predicate.
    */
-  def filter(f: JFunction[Double, java.lang.Boolean]): JavaDoubleRDD =
+  def filter(f: JFunction[JDouble, java.lang.Boolean]): JavaDoubleRDD =
     fromRDD(srdd.filter(x => f(x).booleanValue()))
 
   /**
@@ -133,7 +131,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
   /**
    * Return a sampled subset of this RDD.
    */
-  def sample(withReplacement: Boolean, fraction: Double, seed: Int): JavaDoubleRDD =
+  def sample(withReplacement: Boolean, fraction: JDouble, seed: Int): JavaDoubleRDD =
     fromRDD(srdd.sample(withReplacement, fraction, seed))
 
   /**
@@ -145,7 +143,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
   // Double RDD functions
 
   /** Add up the elements in this RDD. */
-  def sum(): Double = srdd.sum()
+  def sum(): JDouble = srdd.sum()
 
   /**
    * Return a [[org.apache.spark.util.StatCounter]] object that captures the mean, variance and
@@ -154,35 +152,35 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
   def stats(): StatCounter = srdd.stats()
 
   /** Compute the mean of this RDD's elements. */
-  def mean(): Double = srdd.mean()
+  def mean(): JDouble = srdd.mean()
 
   /** Compute the variance of this RDD's elements. */
-  def variance(): Double = srdd.variance()
+  def variance(): JDouble = srdd.variance()
 
   /** Compute the standard deviation of this RDD's elements. */
-  def stdev(): Double = srdd.stdev()
+  def stdev(): JDouble = srdd.stdev()
 
   /**
    * Compute the sample standard deviation of this RDD's elements (which corrects for bias in
    * estimating the standard deviation by dividing by N-1 instead of N).
    */
-  def sampleStdev(): Double = srdd.sampleStdev()
+  def sampleStdev(): JDouble = srdd.sampleStdev()
 
   /**
    * Compute the sample variance of this RDD's elements (which corrects for bias in
    * estimating the standard variance by dividing by N-1 instead of N).
    */
-  def sampleVariance(): Double = srdd.sampleVariance()
+  def sampleVariance(): JDouble = srdd.sampleVariance()
 
   /** Return the approximate mean of the elements in this RDD. */
-  def meanApprox(timeout: Long, confidence: Double): PartialResult[BoundedDouble] =
+  def meanApprox(timeout: Long, confidence: JDouble): PartialResult[BoundedDouble] =
     srdd.meanApprox(timeout, confidence)
 
   /** (Experimental) Approximate operation to return the mean within a timeout. */
   def meanApprox(timeout: Long): PartialResult[BoundedDouble] = srdd.meanApprox(timeout)
 
   /** (Experimental) Approximate operation to return the sum within a timeout. */
-  def sumApprox(timeout: Long, confidence: Double): PartialResult[BoundedDouble] =
+  def sumApprox(timeout: Long, confidence: JDouble): PartialResult[BoundedDouble] =
     srdd.sumApprox(timeout, confidence)
 
   /** (Experimental) Approximate operation to return the sum within a timeout. */
@@ -222,7 +220,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
     srdd.histogram(buckets, false)
   }
 
-  def histogram(buckets: Array[Double], evenBuckets: Boolean): Array[Long] = {
+  def histogram(buckets: Array[JDouble], evenBuckets: Boolean): Array[Long] = {
     srdd.histogram(buckets.map(_.toDouble), evenBuckets)
   }
 
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
index 7b73057953552fe6bca94db0c2d3e7a7820c392f..0055c98844ded3e38a01e902d60fd82a4978338f 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
@@ -20,8 +20,8 @@ package org.apache.spark.api.java
 import scala.reflect.ClassTag
 
 import org.apache.spark._
-import org.apache.spark.rdd.RDD
 import org.apache.spark.api.java.function.{Function => JFunction}
+import org.apache.spark.rdd.RDD
 import org.apache.spark.storage.StorageLevel
 
 class JavaRDD[T](val rdd: RDD[T])(implicit val classTag: ClassTag[T])
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java
index 3e85052cd03e8323f208bed5d37c86979ca75a96..30e6a524742c4a8621a3df084cb433fb3dc0bf7c 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java
@@ -17,7 +17,6 @@
 
 package org.apache.spark.api.java.function;
 
-
 import java.io.Serializable;
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java
index 5e9b8c48b89aa575d1800159dc5d5208500983cc..490da255bc91203a23b68a50a3b8c34f90b8efce 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java
@@ -17,7 +17,6 @@
 
 package org.apache.spark.api.java.function;
 
-
 import java.io.Serializable;
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/Function.java b/core/src/main/scala/org/apache/spark/api/java/function/Function.java
index 537439ef53888e9fc22d5399aa956b44d5fa7d7b..e0fcd460c8404972e6a7a2d1a5fea387014131f7 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/Function.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/Function.java
@@ -17,11 +17,10 @@
 
 package org.apache.spark.api.java.function;
 
-import scala.reflect.ClassTag;
-import scala.reflect.ClassTag$;
-
 import java.io.Serializable;
 
+import scala.reflect.ClassTag;
+import scala.reflect.ClassTag$;
 
 /**
  * Base class for functions whose return types do not create special RDDs. PairFunction and
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/Function2.java b/core/src/main/scala/org/apache/spark/api/java/function/Function2.java
index a2d1214fb46929fc819f54e28a6454e6012d44fe..16d7379462e15a96fbe2acfe7eca7f96cb16825b 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/Function2.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/Function2.java
@@ -17,11 +17,11 @@
 
 package org.apache.spark.api.java.function;
 
+import java.io.Serializable;
+
 import scala.reflect.ClassTag;
 import scala.reflect.ClassTag$;
 
-import java.io.Serializable;
-
 /**
  * A two-argument function that takes arguments of type T1 and T2 and returns an R.
  */
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/Function3.java b/core/src/main/scala/org/apache/spark/api/java/function/Function3.java
index fb1deceab5b562b0c0d54b49d79052540dd921ef..096eb71f95cd064cf2698772d178b45a86e72d47 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/Function3.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/Function3.java
@@ -17,11 +17,10 @@
 
 package org.apache.spark.api.java.function;
 
+import java.io.Serializable;
+
 import scala.reflect.ClassTag;
 import scala.reflect.ClassTag$;
-import scala.runtime.AbstractFunction2;
-
-import java.io.Serializable;
 
 /**
  * A three-argument function that takes arguments of type T1, T2 and T3 and returns an R.
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java
index ca485b3cc2d9348484d27a88f2e8c43be73959bb..c72b98c28a0cb44438ff993396136898f3699f13 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java
@@ -17,12 +17,12 @@
 
 package org.apache.spark.api.java.function;
 
+import java.io.Serializable;
+
 import scala.Tuple2;
 import scala.reflect.ClassTag;
 import scala.reflect.ClassTag$;
 
-import java.io.Serializable;
-
 /**
  * A function that returns zero or more key-value pair records from each input record. The
  * key-value pairs are represented as scala.Tuple2 objects.
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java
index cbe2306026a61fd51b0fc713ec2ca38126a3f972..84b9136d98aa2dbadf9fad265bc4e6ded8723888 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java
@@ -17,12 +17,12 @@
 
 package org.apache.spark.api.java.function;
 
+import java.io.Serializable;
+
 import scala.Tuple2;
 import scala.reflect.ClassTag;
 import scala.reflect.ClassTag$;
 
-import java.io.Serializable;
-
 /**
  * A function that returns key-value pairs (Tuple2<K, V>), and can be used to construct PairRDDs.
  */
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala b/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala
index 35eca62ecd5869e4869b4770d75fe4d8277347ce..95bec5030bfdd34a7cef0b4eb16a1f98b4de3690 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.api.python
 
-import java.util.Arrays
-
 import org.apache.spark.Partitioner
 import org.apache.spark.util.Utils
 
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
index 33667a998ed417003f4ddf808bb85c2de57ebbb1..e4d0285710e84a427eb956483907858aa067c20f 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
@@ -24,9 +24,9 @@ import java.util.{List => JList, ArrayList => JArrayList, Map => JMap, Collectio
 import scala.collection.JavaConversions._
 import scala.reflect.ClassTag
 
+import org.apache.spark._
 import org.apache.spark.api.java.{JavaSparkContext, JavaPairRDD, JavaRDD}
 import org.apache.spark.broadcast.Broadcast
-import org.apache.spark._
 import org.apache.spark.rdd.RDD
 import org.apache.spark.util.Utils
 
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
index f291266fcf17c1546f34e6e12bc66accc6836e24..a5f0f3d5e7eaecf7931cd9c53fe10fdef65c1981 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark.api.python
 
-import java.io.{OutputStreamWriter, File, DataInputStream, IOException}
-import java.net.{ServerSocket, Socket, SocketException, InetAddress}
+import java.io.{DataInputStream, File, IOException, OutputStreamWriter}
+import java.net.{InetAddress, ServerSocket, Socket, SocketException}
 
 import scala.collection.JavaConversions._
 
diff --git a/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala
index 39ee0dbb92841da7d1508d757e737a2404c18fa7..20207c261320b2698f48ae26c22f55cedbe4e3b0 100644
--- a/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala
@@ -24,7 +24,7 @@ import java.util.concurrent.TimeUnit
 import it.unimi.dsi.fastutil.io.FastBufferedInputStream
 import it.unimi.dsi.fastutil.io.FastBufferedOutputStream
 
-import org.apache.spark.{SparkConf, HttpServer, Logging, SparkEnv}
+import org.apache.spark.{HttpServer, Logging, SparkConf, SparkEnv}
 import org.apache.spark.io.CompressionCodec
 import org.apache.spark.storage.{BroadcastBlockId, StorageLevel}
 import org.apache.spark.util.{MetadataCleaner, MetadataCleanerType, TimeStampedHashSet, Utils}
diff --git a/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala
index ec997255d59a1aa91fbdad0e84c89f65b3749f52..22d783c8590c697d6c30d6ed413b07ec8b77cd7b 100644
--- a/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala
@@ -26,7 +26,6 @@ import org.apache.spark._
 import org.apache.spark.storage.{BroadcastBlockId, BroadcastHelperBlockId, StorageLevel}
 import org.apache.spark.util.Utils
 
-
 private[spark] class TorrentBroadcast[T](@transient var value_ : T, isLocal: Boolean, id: Long)
 extends Broadcast[T](id) with Logging with Serializable {
 
diff --git a/core/src/main/scala/org/apache/spark/deploy/Client.scala b/core/src/main/scala/org/apache/spark/deploy/Client.scala
index 9987e2300ceb75f8411653cdc7751ae88ee558ed..eb5676b51d83633cfbe02f4e8c92a49aa61e204f 100644
--- a/core/src/main/scala/org/apache/spark/deploy/Client.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/Client.scala
@@ -23,13 +23,13 @@ import scala.concurrent._
 
 import akka.actor._
 import akka.pattern.ask
+import akka.remote.{AssociationErrorEvent, DisassociatedEvent, RemotingLifecycleEvent}
 import org.apache.log4j.{Level, Logger}
 
 import org.apache.spark.{Logging, SparkConf}
 import org.apache.spark.deploy.DeployMessages._
 import org.apache.spark.deploy.master.{DriverState, Master}
 import org.apache.spark.util.{AkkaUtils, Utils}
-import akka.remote.{AssociationErrorEvent, DisassociatedEvent, RemotingLifecycleEvent}
 
 /**
  * Proxy that relays messages to the driver.
diff --git a/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala b/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
index 7de7c4864ee549d955d794b80add9fcab3ef7c93..190b331cfe7d888dba7375154cc20d0c8f3d39aa 100644
--- a/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
@@ -21,10 +21,10 @@ import java.io._
 import java.net.URL
 import java.util.concurrent.TimeoutException
 
+import scala.collection.mutable.ListBuffer
 import scala.concurrent.{Await, future, promise}
-import scala.concurrent.duration._
 import scala.concurrent.ExecutionContext.Implicits.global
-import scala.collection.mutable.ListBuffer
+import scala.concurrent.duration._
 import scala.sys.process._
 
 import net.liftweb.json.JsonParser
diff --git a/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala b/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
index 33e69371b834433317ef2c67cc0708894aee61c9..318beb5db5214c2ca0786f836c03728462de770b 100644
--- a/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
@@ -20,10 +20,9 @@ package org.apache.spark.deploy
 import net.liftweb.json.JsonDSL._
 
 import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
-import org.apache.spark.deploy.master.{ApplicationInfo, WorkerInfo, DriverInfo}
+import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
 import org.apache.spark.deploy.worker.ExecutorRunner
 
-
 private[spark] object JsonProtocol {
  def writeWorkerInfo(obj: WorkerInfo) = {
    ("id" -> obj.id) ~
diff --git a/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala b/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
index 488843a32c167b7dd8903c263b3c4c2cbdfef215..a73b459c3cea1ed2f1505d881d85e48b1844d33e 100644
--- a/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
@@ -17,14 +17,14 @@
 
 package org.apache.spark.deploy
 
+import scala.collection.mutable.ArrayBuffer
+
 import akka.actor.ActorSystem
 
+import org.apache.spark.{Logging, SparkConf}
 import org.apache.spark.deploy.worker.Worker
 import org.apache.spark.deploy.master.Master
 import org.apache.spark.util.Utils
-import org.apache.spark.{SparkConf, Logging}
-
-import scala.collection.mutable.ArrayBuffer
 
 /**
  * Testing class that creates a Spark standalone process in-cluster (that is, running the
diff --git a/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala b/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
index 80179320325deec9871e12e1fed5be135ea83072..1550c3eb4286bee50eaca682b11a797465aa1e9c 100644
--- a/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
@@ -17,9 +17,9 @@
 
 package org.apache.spark.deploy.client
 
-import org.apache.spark.util.{Utils, AkkaUtils}
-import org.apache.spark.{SparkConf, SparkContext, Logging}
-import org.apache.spark.deploy.{Command, ApplicationDescription}
+import org.apache.spark.{Logging, SparkConf}
+import org.apache.spark.deploy.{ApplicationDescription, Command}
+import org.apache.spark.util.{AkkaUtils, Utils}
 
 private[spark] object TestClient {
 
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
index 3e26379166885fac81d7320a0d8f5ce8ec095889..e8867bc1691d35aa279e0d68d4fe234ce9b42a7d 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
@@ -17,11 +17,14 @@
 
 package org.apache.spark.deploy.master
 
-import org.apache.spark.deploy.ApplicationDescription
 import java.util.Date
-import akka.actor.ActorRef
+
 import scala.collection.mutable
 
+import akka.actor.ActorRef
+
+import org.apache.spark.deploy.ApplicationDescription
+
 private[spark] class ApplicationInfo(
     val startTime: Long,
     val id: String,
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala b/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala
index 74bb9ebf1db4a3e52fedb4c14db7cf63d9f6d22f..aa85aa060d9c101168eec1518df4a38b92cdaed5 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala
@@ -20,6 +20,7 @@ package org.apache.spark.deploy.master
 import java.io._
 
 import akka.serialization.Serialization
+
 import org.apache.spark.Logging
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
index e44f90c1412e2a4acc935917fa2d307c47a68056..51794ce40cb453ae4e8056a7a30bb63d5b3c7da1 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
@@ -30,15 +30,14 @@ import akka.pattern.ask
 import akka.remote.{DisassociatedEvent, RemotingLifecycleEvent}
 import akka.serialization.SerializationExtension
 
-
-import org.apache.spark.{SparkConf, Logging, SparkException}
+import org.apache.spark.{Logging, SparkConf, SparkException}
 import org.apache.spark.deploy.{ApplicationDescription, DriverDescription, ExecutorState}
 import org.apache.spark.deploy.DeployMessages._
+import org.apache.spark.deploy.master.DriverState.DriverState
 import org.apache.spark.deploy.master.MasterMessages._
 import org.apache.spark.deploy.master.ui.MasterWebUI
 import org.apache.spark.metrics.MetricsSystem
 import org.apache.spark.util.{AkkaUtils, Utils}
-import org.apache.spark.deploy.master.DriverState.DriverState
 
 private[spark] class Master(host: String, port: Int, webUiPort: Int) extends Actor with Logging {
   import context.dispatcher   // to use Akka's scheduler.schedule()
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala b/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
index e7f3224091ca8b4475a041de30f53afadd00f5f3..a87781fb93850ac851066e88bb2cb1b221cc3f71 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark.deploy.master
 
-import org.apache.spark.util.{Utils, IntParam}
 import org.apache.spark.SparkConf
+import org.apache.spark.util.{IntParam, Utils}
 
 /**
  * Command-line parser for the master.
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/SparkZooKeeperSession.scala b/core/src/main/scala/org/apache/spark/deploy/master/SparkZooKeeperSession.scala
index 999090ad7464dd5becec6c7a5ee4d2e8cb784fca..57758055b19c0b93cb7ddd2b8b984ea2779b65c5 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/SparkZooKeeperSession.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/SparkZooKeeperSession.scala
@@ -23,7 +23,7 @@ import org.apache.zookeeper._
 import org.apache.zookeeper.Watcher.Event.KeeperState
 import org.apache.zookeeper.data.Stat
 
-import org.apache.spark.{SparkConf, Logging}
+import org.apache.spark.{Logging, SparkConf}
 
 /**
  * Provides a Scala-side interface to the standard ZooKeeper client, with the addition of retry
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala
index 77c23fb9fbeea5b23266e66fb627a964cdaae2b3..47b8f67f8a45b475918076360b237a8e485761ad 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala
@@ -21,7 +21,7 @@ import akka.actor.ActorRef
 import org.apache.zookeeper._
 import org.apache.zookeeper.Watcher.Event.EventType
 
-import org.apache.spark.{SparkConf, Logging}
+import org.apache.spark.{Logging, SparkConf}
 import org.apache.spark.deploy.master.MasterMessages._
 
 private[spark] class ZooKeeperLeaderElectionAgent(val masterActor: ActorRef,
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala
index 10816a1f43ff6131202349a03ccbf4fb39b77540..48b2fc06a9d7069b38027cc87c2d4a81c41f0feb 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala
@@ -17,10 +17,10 @@
 
 package org.apache.spark.deploy.master
 
-import org.apache.spark.{SparkConf, Logging}
+import akka.serialization.Serialization
 import org.apache.zookeeper._
 
-import akka.serialization.Serialization
+import org.apache.spark.{Logging, SparkConf}
 
 class ZooKeeperPersistenceEngine(serialization: Serialization, conf: SparkConf)
   extends PersistenceEngine
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
index f29a6ad2e7b92c9a876a7cf636929166718374c7..5cc4adbe448b746ba129d551a52650c595b22ed3 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
@@ -17,11 +17,12 @@
 
 package org.apache.spark.deploy.master.ui
 
+import javax.servlet.http.HttpServletRequest
+
 import scala.concurrent.Await
 import scala.xml.Node
 
 import akka.pattern.ask
-import javax.servlet.http.HttpServletRequest
 import net.liftweb.json.JsonAST.JValue
 
 import org.apache.spark.deploy.JsonProtocol
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
index 04f9a22a25a94cd8212635490cf51c1db5da79d7..01c8f9065e50ae4b44d799a3e378e28cfabce922 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
@@ -17,12 +17,12 @@
 
 package org.apache.spark.deploy.master.ui
 
+import javax.servlet.http.HttpServletRequest
+
 import scala.concurrent.Await
-import scala.concurrent.duration._
 import scala.xml.Node
 
 import akka.pattern.ask
-import javax.servlet.http.HttpServletRequest
 import net.liftweb.json.JsonAST.JValue
 
 import org.apache.spark.deploy.{DeployWebUI, JsonProtocol}
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
index 05c4df891ed75ba12cd62f7330e3be3884d772f6..5ab13e7aa6b1fdf9950f989a4953944b38e72203 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.deploy.master.ui
 
 import javax.servlet.http.HttpServletRequest
+
 import org.eclipse.jetty.server.{Handler, Server}
 
 import org.apache.spark.Logging
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
index 2ceccc703d2912581687a9179286baa6b55a281f..0c761dfc93a1f28680d5d3c222783ff9f777a55b 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.deploy.worker
 
-import java.io.{File, FileOutputStream, IOException, InputStream}
+import java.io.{File, FileOutputStream, InputStream, IOException}
 import java.lang.System._
 
 import org.apache.spark.Logging
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
index 18885d7ca6daa2ea4124c9e838fc631da7a8be5b..2edd921066876ae242469d7b3c1d0712fef29556 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
@@ -20,12 +20,11 @@ package org.apache.spark.deploy.worker
 import java.io._
 
 import akka.actor.ActorRef
-
 import com.google.common.base.Charsets
 import com.google.common.io.Files
 
 import org.apache.spark.Logging
-import org.apache.spark.deploy.{ExecutorState, ApplicationDescription, Command}
+import org.apache.spark.deploy.{ApplicationDescription, Command, ExecutorState}
 import org.apache.spark.deploy.DeployMessages.ExecutorStateChanged
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
index f4ee0e2343849b2696c1b133d0c2671ce8504da7..7b0b7861b76e113968377cdad261c344d9935379 100755
--- a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
@@ -31,7 +31,6 @@ import org.apache.spark.{Logging, SparkConf, SparkException}
 import org.apache.spark.deploy.{ExecutorDescription, ExecutorState}
 import org.apache.spark.deploy.DeployMessages._
 import org.apache.spark.deploy.master.{DriverState, Master}
-import org.apache.spark.deploy.master.DriverState.DriverState
 import org.apache.spark.deploy.worker.ui.WorkerWebUI
 import org.apache.spark.metrics.MetricsSystem
 import org.apache.spark.util.{AkkaUtils, Utils}
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
index 3ed528e6b3773cf7bc3bf903d24adc7970f13752..d35d5be73ff97f225cb4eb5bef068e8b1f499162 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
@@ -17,9 +17,10 @@
 
 package org.apache.spark.deploy.worker
 
-import org.apache.spark.util.{Utils, IntParam, MemoryParam}
 import java.lang.management.ManagementFactory
 
+import org.apache.spark.util.{IntParam, MemoryParam, Utils}
+
 /**
  * Command-line parser for the master.
  */
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
index 86688e44242a9b04a33aad4a0a6b00cbbe8de803..bdf126f93abc83f8ce8f51611314f24b89ac9124 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
@@ -18,11 +18,11 @@
 package org.apache.spark.deploy.worker.ui
 
 import java.io.File
-
 import javax.servlet.http.HttpServletRequest
+
 import org.eclipse.jetty.server.{Handler, Server}
 
-import org.apache.spark.{Logging, SparkConf}
+import org.apache.spark.Logging
 import org.apache.spark.deploy.worker.Worker
 import org.apache.spark.ui.{JettyUtils, UIUtils}
 import org.apache.spark.ui.JettyUtils._
diff --git a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
index 45b43b403dd8c087fa730300b53a29e4352b4027..0aae569b172726dcdc22cccedb7aaa25b1b0f11f 100644
--- a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
@@ -22,7 +22,7 @@ import java.nio.ByteBuffer
 import akka.actor._
 import akka.remote._
 
-import org.apache.spark.{SparkConf, SparkContext, Logging}
+import org.apache.spark.{Logging, SparkConf}
 import org.apache.spark.TaskState.TaskState
 import org.apache.spark.deploy.worker.WorkerWatcher
 import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._
diff --git a/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala
index ad7dd34c769404bea090af0faa56ed95b9d2b323..3d34960653f5da9d6332dc7a8cf061cb7107fc06 100644
--- a/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.executor
 
 import java.nio.ByteBuffer
+
 import org.apache.spark.TaskState.TaskState
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala b/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala
index c2e973e1738d428356e1402c6f4f64df568eb4fd..127f5e90f3e1afb087f21e7ab2cb15b9e613bd9a 100644
--- a/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala
+++ b/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala
@@ -17,12 +17,11 @@
 
 package org.apache.spark.executor
 
-import com.codahale.metrics.{Gauge, MetricRegistry}
+import scala.collection.JavaConversions._
 
+import com.codahale.metrics.{Gauge, MetricRegistry}
 import org.apache.hadoop.fs.FileSystem
 
-import scala.collection.JavaConversions._
-
 import org.apache.spark.metrics.source.Source
 
 class ExecutorSource(val executor: Executor, executorId: String) extends Source {
diff --git a/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
index b56d8c99124dfbc7ea7c136464c5c2f0832bf735..6fc702fdb1512cc36a721bef16cb18417c9960bf 100644
--- a/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
@@ -20,8 +20,7 @@ package org.apache.spark.executor
 import java.nio.ByteBuffer
 
 import com.google.protobuf.ByteString
-
-import org.apache.mesos.{Executor => MesosExecutor, MesosExecutorDriver, MesosNativeLibrary, ExecutorDriver}
+import org.apache.mesos.{Executor => MesosExecutor, ExecutorDriver, MesosExecutorDriver, MesosNativeLibrary}
 import org.apache.mesos.Protos.{TaskStatus => MesosTaskStatus, _}
 
 import org.apache.spark.Logging
@@ -29,7 +28,6 @@ import org.apache.spark.TaskState
 import org.apache.spark.TaskState.TaskState
 import org.apache.spark.util.Utils
 
-
 private[spark] class MesosExecutorBackend
   extends MesosExecutor
   with ExecutorBackend
diff --git a/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala b/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
index 59801773205bdab204e25bb227995361feebf812..848b5c439bb5b2cebf306243dab4b7bd9dd2fc17 100644
--- a/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
+++ b/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
@@ -20,10 +20,9 @@ package org.apache.spark.io
 import java.io.{InputStream, OutputStream}
 
 import com.ning.compress.lzf.{LZFInputStream, LZFOutputStream}
-
 import org.xerial.snappy.{SnappyInputStream, SnappyOutputStream}
-import org.apache.spark.{SparkEnv, SparkConf}
 
+import org.apache.spark.SparkConf
 
 /**
  * CompressionCodec allows the customization of choosing different compression implementations
diff --git a/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala
index e54ac0b332093b0306473c08d8871213105f7487..6883a544945984505bb9ebf330ff135f106d6605 100644
--- a/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark.metrics
 
+import java.io.{FileInputStream, InputStream}
 import java.util.Properties
-import java.io.{File, FileInputStream, InputStream, IOException}
 
 import scala.collection.mutable
 import scala.util.matching.Regex
diff --git a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
index de233e416a9dcd6df264cd8a4aadf5a1049a0f24..966c0921242665788068a07700fc15790a76f5f0 100644
--- a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
@@ -17,14 +17,14 @@
 
 package org.apache.spark.metrics
 
-import com.codahale.metrics.{Metric, MetricFilter, MetricRegistry}
-
 import java.util.Properties
 import java.util.concurrent.TimeUnit
 
 import scala.collection.mutable
 
-import org.apache.spark.{SparkConf, Logging}
+import com.codahale.metrics.{Metric, MetricFilter, MetricRegistry}
+
+import org.apache.spark.{Logging, SparkConf}
 import org.apache.spark.metrics.sink.{MetricsServlet, Sink}
 import org.apache.spark.metrics.source.Source
 
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
index bce257d6e6f47bbd4e582baedb21410aa639b412..98fa1dbd7c6abf412a9d35f8ac6bbe42f18bc4bb 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
@@ -17,11 +17,11 @@
 
 package org.apache.spark.metrics.sink
 
-import com.codahale.metrics.{ConsoleReporter, MetricRegistry}
-
 import java.util.Properties
 import java.util.concurrent.TimeUnit
 
+import com.codahale.metrics.{ConsoleReporter, MetricRegistry}
+
 import org.apache.spark.metrics.MetricsSystem
 
 class ConsoleSink(val property: Properties, val registry: MetricRegistry) extends Sink {
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
index 3d1a06a395a724fe5f6f6496c30fd4fe9e47669f..40f64768e68857955917986d166008adaedfd044 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
@@ -17,12 +17,12 @@
 
 package org.apache.spark.metrics.sink
 
-import com.codahale.metrics.{CsvReporter, MetricRegistry}
-
 import java.io.File
 import java.util.{Locale, Properties}
 import java.util.concurrent.TimeUnit
 
+import com.codahale.metrics.{CsvReporter, MetricRegistry}
+
 import org.apache.spark.metrics.MetricsSystem
 
 class CsvSink(val property: Properties, val registry: MetricRegistry) extends Sink {
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala
index b924907070eb9204c8f74f8f23ea3ef9ac6a9436..410ca0704b5c4d3d02417f806c24f2800be3b586 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala
@@ -20,8 +20,8 @@ package org.apache.spark.metrics.sink
 import java.util.Properties
 import java.util.concurrent.TimeUnit
 
-import com.codahale.metrics.ganglia.GangliaReporter
 import com.codahale.metrics.MetricRegistry
+import com.codahale.metrics.ganglia.GangliaReporter
 import info.ganglia.gmetric4j.gmetric.GMetric
 
 import org.apache.spark.metrics.MetricsSystem
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
index cdcfec8ca785bcafc3574164654f699d12da4768..e09be001421fc7189637394a9661cf136079566d 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
@@ -17,12 +17,12 @@
 
 package org.apache.spark.metrics.sink
 
+import java.net.InetSocketAddress
 import java.util.Properties
 import java.util.concurrent.TimeUnit
-import java.net.InetSocketAddress
 
 import com.codahale.metrics.MetricRegistry
-import com.codahale.metrics.graphite.{GraphiteReporter, Graphite}
+import com.codahale.metrics.graphite.{Graphite, GraphiteReporter}
 
 import org.apache.spark.metrics.MetricsSystem
 
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala
index 621d086d415ccb920ab85dffe4023916246ba5cb..b5cf210af2119f8a52ed7d14b5aa93a475de4772 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala
@@ -17,10 +17,10 @@
 
 package org.apache.spark.metrics.sink
 
-import com.codahale.metrics.{JmxReporter, MetricRegistry}
-
 import java.util.Properties
 
+import com.codahale.metrics.{JmxReporter, MetricRegistry}
+
 class JmxSink(val property: Properties, val registry: MetricRegistry) extends Sink {
   val reporter: JmxReporter = JmxReporter.forRegistry(registry).build()
 
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala b/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
index 99357fede6d0640b3732ba1e8eb6ec720e5ff645..3cdfe26d40f669bd2ae175276ce558f1aa1a8731 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
@@ -17,15 +17,13 @@
 
 package org.apache.spark.metrics.sink
 
-import com.codahale.metrics.MetricRegistry
-import com.codahale.metrics.json.MetricsModule
-
-import com.fasterxml.jackson.databind.ObjectMapper
-
 import java.util.Properties
 import java.util.concurrent.TimeUnit
 import javax.servlet.http.HttpServletRequest
 
+import com.codahale.metrics.MetricRegistry
+import com.codahale.metrics.json.MetricsModule
+import com.fasterxml.jackson.databind.ObjectMapper
 import org.eclipse.jetty.server.Handler
 
 import org.apache.spark.ui.JettyUtils
diff --git a/core/src/main/scala/org/apache/spark/network/BufferMessage.scala b/core/src/main/scala/org/apache/spark/network/BufferMessage.scala
index fb4c65909a9e2c62a4049d01c4e781c4e6cb53ef..d3c09b16063d6a61c7fce6b32de615e990667f6f 100644
--- a/core/src/main/scala/org/apache/spark/network/BufferMessage.scala
+++ b/core/src/main/scala/org/apache/spark/network/BufferMessage.scala
@@ -23,7 +23,6 @@ import scala.collection.mutable.ArrayBuffer
 
 import org.apache.spark.storage.BlockManager
 
-
 private[spark]
 class BufferMessage(id_ : Int, val buffers: ArrayBuffer[ByteBuffer], var ackId: Int)
   extends Message(Message.BUFFER_MESSAGE, id_) {
diff --git a/core/src/main/scala/org/apache/spark/network/Connection.scala b/core/src/main/scala/org/apache/spark/network/Connection.scala
index ae2007e41b77fd4c289a0080586c9128c362acf8..f2e3c1a14ecc63ee91b8e70911e3b16dfc0ebd40 100644
--- a/core/src/main/scala/org/apache/spark/network/Connection.scala
+++ b/core/src/main/scala/org/apache/spark/network/Connection.scala
@@ -17,16 +17,13 @@
 
 package org.apache.spark.network
 
-import org.apache.spark._
-
-import scala.collection.mutable.{HashMap, Queue, ArrayBuffer}
-
-import java.io._
+import java.net._
 import java.nio._
 import java.nio.channels._
-import java.nio.channels.spi._
-import java.net._
 
+import scala.collection.mutable.{ArrayBuffer, HashMap, Queue}
+
+import org.apache.spark._
 
 private[spark]
 abstract class Connection(val channel: SocketChannel, val selector: Selector,
diff --git a/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
index a78d6ac70f8caa86d42776b784bf2ff20d20ca7a..3dd82bee0b5fd933890023e4b81bee28a08162fa 100644
--- a/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
@@ -17,24 +17,21 @@
 
 package org.apache.spark.network
 
-import org.apache.spark._
-
+import java.net._
 import java.nio._
 import java.nio.channels._
 import java.nio.channels.spi._
-import java.net._
 import java.util.concurrent.{LinkedBlockingDeque, TimeUnit, ThreadPoolExecutor}
 
-import scala.collection.mutable.HashSet
+import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.HashMap
+import scala.collection.mutable.HashSet
 import scala.collection.mutable.SynchronizedMap
 import scala.collection.mutable.SynchronizedQueue
-import scala.collection.mutable.ArrayBuffer
-
-import scala.concurrent.{Await, Promise, ExecutionContext, Future}
-import scala.concurrent.duration.Duration
+import scala.concurrent.{Await, ExecutionContext, Future, Promise}
 import scala.concurrent.duration._
 
+import org.apache.spark._
 import org.apache.spark.util.Utils
 
 private[spark] class ConnectionManager(port: Int, conf: SparkConf) extends Logging {
diff --git a/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala
index 50dd9bc2d101fe721cd07d474a4dd9f3445580b3..b82edb6850d23972d152489a48ae360b5211dba4 100644
--- a/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala
@@ -21,7 +21,6 @@ import java.net.InetSocketAddress
 
 import org.apache.spark.util.Utils
 
-
 private[spark] case class ConnectionManagerId(host: String, port: Int) {
   // DEBUG code
   Utils.checkHost(host)
diff --git a/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
index 8e5c5296cb8d1890fa3a4cb28395e08b5138cbd8..35f64134b073aa3e16f338d3e173fb8eb6a7b490 100644
--- a/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
@@ -17,16 +17,13 @@
 
 package org.apache.spark.network
 
-import org.apache.spark._
-import org.apache.spark.SparkContext._
-
-import scala.io.Source
-
 import java.nio.ByteBuffer
-import java.net.InetAddress
 
 import scala.concurrent.Await
 import scala.concurrent.duration._
+import scala.io.Source
+
+import org.apache.spark._
 
 private[spark] object ConnectionManagerTest extends Logging{
   def main(args: Array[String]) {
diff --git a/core/src/main/scala/org/apache/spark/network/Message.scala b/core/src/main/scala/org/apache/spark/network/Message.scala
index 2612884bdbe158fdc8e11866995919d02b9a18eb..20fe67661844f3f48768212a23c0170762b932df 100644
--- a/core/src/main/scala/org/apache/spark/network/Message.scala
+++ b/core/src/main/scala/org/apache/spark/network/Message.scala
@@ -17,12 +17,11 @@
 
 package org.apache.spark.network
 
-import java.nio.ByteBuffer
 import java.net.InetSocketAddress
+import java.nio.ByteBuffer
 
 import scala.collection.mutable.ArrayBuffer
 
-
 private[spark] abstract class Message(val typ: Long, val id: Int) {
   var senderAddress: InetSocketAddress = null
   var started = false
diff --git a/core/src/main/scala/org/apache/spark/network/MessageChunk.scala b/core/src/main/scala/org/apache/spark/network/MessageChunk.scala
index e0fe57b80d5cd693080f9e7e79fb5a8a4f01f569..d0f986a12bfe0d393d07154286f2f4128227f649 100644
--- a/core/src/main/scala/org/apache/spark/network/MessageChunk.scala
+++ b/core/src/main/scala/org/apache/spark/network/MessageChunk.scala
@@ -21,7 +21,6 @@ import java.nio.ByteBuffer
 
 import scala.collection.mutable.ArrayBuffer
 
-
 private[network]
 class MessageChunk(val header: MessageChunkHeader, val buffer: ByteBuffer) {
 
diff --git a/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala b/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala
index 235fbc39b3bd254e5457652378abf3f1c956377a..9bcbc6141a502afb5caaecc138254c09bedbbc42 100644
--- a/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala
+++ b/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala
@@ -21,7 +21,6 @@ import java.net.InetAddress
 import java.net.InetSocketAddress
 import java.nio.ByteBuffer
 
-
 private[spark] class MessageChunkHeader(
     val typ: Long,
     val id: Int,
diff --git a/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala b/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
index 1c9d6030d68d76f886ad212e34c864b2b244d5b2..9976255c7e251190b8ce759a1eabd33f13c1b6b4 100644
--- a/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.network
 
 import java.nio.ByteBuffer
-import java.net.InetAddress
+
 import org.apache.spark.SparkConf
 
 private[spark] object ReceiverTest {
diff --git a/core/src/main/scala/org/apache/spark/network/SenderTest.scala b/core/src/main/scala/org/apache/spark/network/SenderTest.scala
index 162d49bf6161732db9a83005fe293efeb5e09156..646f8425d9551ff7e75c9f2dfdb47c3be03881c7 100644
--- a/core/src/main/scala/org/apache/spark/network/SenderTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/SenderTest.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.network
 
 import java.nio.ByteBuffer
-import java.net.InetAddress
+
 import org.apache.spark.SparkConf
 
 private[spark] object SenderTest {
diff --git a/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala b/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala
index 1b9fa1e53a9f75932884ee78357eb5561f781902..f9082ffb9141ab273417ddc51f647e2440b0249b 100644
--- a/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala
+++ b/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala
@@ -20,7 +20,7 @@ package org.apache.spark.network.netty
 import io.netty.buffer._
 
 import org.apache.spark.Logging
-import org.apache.spark.storage.{TestBlockId, BlockId}
+import org.apache.spark.storage.{BlockId, TestBlockId}
 
 private[spark] class FileHeader (
   val fileLen: Int,
diff --git a/core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala b/core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala
index d87157e12c4876201371663ee94dc1f4a8c6a32d..e7b2855e1ec91ab0118682b292d3a03df7a5477b 100644
--- a/core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala
+++ b/core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala
@@ -19,17 +19,16 @@ package org.apache.spark.network.netty
 
 import java.util.concurrent.Executors
 
+import scala.collection.JavaConverters._
+
 import io.netty.buffer.ByteBuf
 import io.netty.channel.ChannelHandlerContext
 import io.netty.util.CharsetUtil
 
-import org.apache.spark.{SparkContext, SparkConf, Logging}
+import org.apache.spark.{Logging, SparkConf}
 import org.apache.spark.network.ConnectionManagerId
-
-import scala.collection.JavaConverters._
 import org.apache.spark.storage.BlockId
 
-
 private[spark] class ShuffleCopier(conf: SparkConf) extends Logging {
 
   def getBlock(host: String, port: Int, blockId: BlockId,
diff --git a/core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala b/core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala
index 44204a8c46572169c950a09b23f54d7b452fdbec..7ef7aecc6a9fb01524e16d298eac9bb99ff09392 100644
--- a/core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala
+++ b/core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala
@@ -23,7 +23,6 @@ import org.apache.spark.Logging
 import org.apache.spark.util.Utils
 import org.apache.spark.storage.{BlockId, FileSegment}
 
-
 private[spark] class ShuffleSender(portIn: Int, val pResolver: PathResolver) extends Logging {
 
   val server = new FileServer(pResolver, portIn)
diff --git a/core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala b/core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala
index 423ff67a5fd43700e586b9e28930ef0f532090d8..d25452daf760639a4b429f39d142dd682615cee7 100644
--- a/core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala
+++ b/core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala
@@ -18,8 +18,8 @@
 package org.apache.spark.partial
 
 import org.apache.spark._
-import org.apache.spark.scheduler.JobListener
 import org.apache.spark.rdd.RDD
+import org.apache.spark.scheduler.JobListener
 
 /**
  * A JobListener for an approximate single-result action, such as count() or non-parallel reduce().
diff --git a/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala
index e519e3a54846e4b609f319cdb9c508db81542551..40b70baabcad94c2f96f3f9776a8112713e07864 100644
--- a/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala
@@ -18,14 +18,12 @@
 package org.apache.spark.partial
 
 import java.util.{HashMap => JHashMap}
-import java.util.{Map => JMap}
 
+import scala.collection.JavaConversions.mapAsScalaMap
 import scala.collection.Map
 import scala.collection.mutable.HashMap
-import scala.collection.JavaConversions.mapAsScalaMap
 
 import cern.jet.stat.Probability
-
 import it.unimi.dsi.fastutil.objects.{Object2LongOpenHashMap => OLMap}
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala
index cf8a5680b663d0663f306d3a88eae72e25c5c968..b5111891ed35b1b74fdb39e656cf6fbbeb8bb217 100644
--- a/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala
@@ -18,11 +18,10 @@
 package org.apache.spark.partial
 
 import java.util.{HashMap => JHashMap}
-import java.util.{Map => JMap}
 
-import scala.collection.mutable.HashMap
-import scala.collection.Map
 import scala.collection.JavaConversions.mapAsScalaMap
+import scala.collection.Map
+import scala.collection.mutable.HashMap
 
 import org.apache.spark.util.StatCounter
 
diff --git a/core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala
index 8225a5d933ce50261db5edb4bb8c372d1915068f..442fb86227d8699e2a4c253204659e7dae9f38c9 100644
--- a/core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala
@@ -18,11 +18,10 @@
 package org.apache.spark.partial
 
 import java.util.{HashMap => JHashMap}
-import java.util.{Map => JMap}
 
-import scala.collection.mutable.HashMap
-import scala.collection.Map
 import scala.collection.JavaConversions.mapAsScalaMap
+import scala.collection.Map
+import scala.collection.mutable.HashMap
 
 import org.apache.spark.util.StatCounter
 
diff --git a/core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala b/core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala
index 424354ae165a021d15243eaa89ac157d060a6f83..e6c4a6d3794a03ff2ec2dbc3269abfc5b4f41ac4 100644
--- a/core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala
@@ -19,7 +19,7 @@ package org.apache.spark.rdd
 
 import scala.reflect.ClassTag
 
-import org.apache.spark.{SparkContext, SparkEnv, Partition, TaskContext}
+import org.apache.spark.{Partition, SparkContext, SparkEnv, TaskContext}
 import org.apache.spark.storage.{BlockId, BlockManager}
 
 private[spark] class BlockRDDPartition(val blockId: BlockId, idx: Int) extends Partition {
diff --git a/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala
index 87b950ba431f9b58ef7934d7c6c82def247a6d61..4908711d17db768d8663190330755537c348fedc 100644
--- a/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala
@@ -17,10 +17,11 @@
 
 package org.apache.spark.rdd
 
-import java.io.{ObjectOutputStream, IOException}
+import java.io.{IOException, ObjectOutputStream}
+
 import scala.reflect.ClassTag
-import org.apache.spark._
 
+import org.apache.spark._
 
 private[spark]
 class CartesianPartition(
diff --git a/core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala
index 8f9d1d5a84c36c6f28a955de950d711ad8a6ddf5..888af541cf9705134eaede104e2a8e328148a2e8 100644
--- a/core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala
@@ -18,12 +18,15 @@
 package org.apache.spark.rdd
 
 import java.io.IOException
+
 import scala.reflect.ClassTag
+
+import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.fs.Path
+
 import org.apache.spark._
 import org.apache.spark.broadcast.Broadcast
 import org.apache.spark.deploy.SparkHadoopUtil
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.Path
 
 private[spark] class CheckpointRDDPartition(val index: Int) extends Partition {}
 
diff --git a/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala
index 0e47f2e0226104ba2c4edf16ad4405ecf0d11a78..699a10c96c2273e846b4b1de6d2d4d48dc0a1267 100644
--- a/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.rdd
 
-import java.io.{ObjectOutputStream, IOException}
+import java.io.{IOException, ObjectOutputStream}
 
 import scala.collection.mutable.ArrayBuffer
 
diff --git a/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala
index dc345b2df079baf1b6754b38fb704f90ae63701e..4e82b51313bf0458e5be7f6ea90e4f5ce5bb7073 100644
--- a/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala
@@ -17,13 +17,14 @@
 
 package org.apache.spark.rdd
 
-import org.apache.spark._
-import java.io.{ObjectOutputStream, IOException}
+import java.io.{IOException, ObjectOutputStream}
+
 import scala.collection.mutable
-import scala.Some
 import scala.collection.mutable.ArrayBuffer
 import scala.reflect.ClassTag
 
+import org.apache.spark._
+
 /**
  * Class that captures a coalesced RDD by essentially keeping track of parent partitions
  * @param index of this coalesced partition
diff --git a/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
index 20713b4249b5e5751459d5085b5f434d1fe53281..a7b6b3b5146cec7b1542feca1476c07ba6cd1cb2 100644
--- a/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
@@ -17,14 +17,12 @@
 
 package org.apache.spark.rdd
 
+import org.apache.spark.{TaskContext, Logging}
 import org.apache.spark.partial.BoundedDouble
 import org.apache.spark.partial.MeanEvaluator
 import org.apache.spark.partial.PartialResult
 import org.apache.spark.partial.SumEvaluator
 import org.apache.spark.util.StatCounter
-import org.apache.spark.{TaskContext, Logging}
-
-import scala.collection.immutable.NumericRange
 
 /**
  * Extra functions available on RDDs of Doubles through an implicit conversion.
diff --git a/core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala b/core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala
index e74c83b90baa349f359790cb81ea511211ebe48d..9e41b3d1e2d4f5b803c3f5b4df136f14b78ce828 100644
--- a/core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala
@@ -17,9 +17,10 @@
 
 package org.apache.spark.rdd
 
-import org.apache.spark.{OneToOneDependency, Partition, TaskContext}
 import scala.reflect.ClassTag
 
+import org.apache.spark.{Partition, TaskContext}
+
 private[spark] class FilteredRDD[T: ClassTag](
     prev: RDD[T],
     f: T => Boolean)
diff --git a/core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala
index 4d1878fc142ac78a5821a3747a53af634a9b6628..d8f87d4e3690ec224c0e43133bb22e9508c58d07 100644
--- a/core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala
@@ -17,9 +17,9 @@
 
 package org.apache.spark.rdd
 
-import org.apache.spark.{Partition, TaskContext}
 import scala.reflect.ClassTag
 
+import org.apache.spark.{Partition, TaskContext}
 
 private[spark]
 class FlatMappedRDD[U: ClassTag, T: ClassTag](
diff --git a/core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala b/core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala
index 82000bac092dac7986950e0d73bc6b4da78296ef..7c9023f62d3b6f9fa4538853b2cb8af071a25436 100644
--- a/core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala
@@ -17,8 +17,7 @@
 
 package org.apache.spark.rdd
 
-import org.apache.spark.{TaskContext, Partition}
-
+import org.apache.spark.{Partition, TaskContext}
 
 private[spark]
 class FlatMappedValuesRDD[K, V, U](prev: RDD[_ <: Product2[K, V]], f: V => TraversableOnce[U])
diff --git a/core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala
index 1a694475f6975185fac2459c39e3719c4507b11d..f6463fa715a71221c332c978782343b9cfe1369f 100644
--- a/core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala
@@ -17,9 +17,10 @@
 
 package org.apache.spark.rdd
 
-import org.apache.spark.{Partition, TaskContext}
 import scala.reflect.ClassTag
 
+import org.apache.spark.{Partition, TaskContext}
+
 private[spark] class GlommedRDD[T: ClassTag](prev: RDD[T])
   extends RDD[Array[T]](prev) {
 
diff --git a/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
index ad74d4636fb1b928b812e390b8d3af2261a4b470..a374fc4a871b01988987cec96879e023fc967062 100644
--- a/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
@@ -19,7 +19,7 @@ package org.apache.spark.rdd
 
 import java.io.EOFException
 
-import org.apache.hadoop.conf.{Configuration, Configurable}
+import org.apache.hadoop.conf.{Configurable, Configuration}
 import org.apache.hadoop.mapred.InputFormat
 import org.apache.hadoop.mapred.InputSplit
 import org.apache.hadoop.mapred.JobConf
@@ -32,7 +32,6 @@ import org.apache.spark.broadcast.Broadcast
 import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.util.NextIterator
 
-
 /**
  * A Spark split class that wraps around a Hadoop InputSplit.
  */
diff --git a/core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala b/core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala
index db15baf503ad61e1253e988f1b22dd5084a3b094..4883fb828814c34a50de4fb8807164078be59f51 100644
--- a/core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala
@@ -17,9 +17,10 @@
 
 package org.apache.spark.rdd
 
-import org.apache.spark.{Partition, TaskContext}
 import scala.reflect.ClassTag
 
+import org.apache.spark.{Partition, TaskContext}
+
 private[spark] class MapPartitionsRDD[U: ClassTag, T: ClassTag](
     prev: RDD[T],
     f: (TaskContext, Int, Iterator[T]) => Iterator[U],  // (TaskContext, partition index, iterator)
diff --git a/core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala b/core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala
index d33c1af58154c3b85fc7a5fa1a4f2bcd433ebe61..2bc47eb9fcd741f20855fd3a8adeb8449d719a6a 100644
--- a/core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala
@@ -17,8 +17,7 @@
 
 package org.apache.spark.rdd
 
-
-import org.apache.spark.{TaskContext, Partition}
+import org.apache.spark.{Partition, TaskContext}
 
 private[spark]
 class MappedValuesRDD[K, V, U](prev: RDD[_ <: Product2[K, V]], f: V => U)
diff --git a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
index 10d519e6971f1321602de95bdbc7038fb81fe18f..15bec39659e1744ee215a86363c06f069cd9fbb2 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
@@ -22,12 +22,13 @@ import java.text.SimpleDateFormat
 import java.util.Date
 import java.util.{HashMap => JHashMap}
 
+import scala.collection.JavaConversions._
 import scala.collection.Map
 import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
-import scala.collection.JavaConversions._
-import scala.reflect.{ClassTag, classTag}
+import scala.reflect.ClassTag
 
+import com.clearspring.analytics.stream.cardinality.HyperLogLog
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.io.SequenceFile.CompressionType
@@ -38,15 +39,14 @@ import org.apache.hadoop.mapreduce.{Job => NewAPIHadoopJob}
 import org.apache.hadoop.mapreduce.{RecordWriter => NewRecordWriter}
 import org.apache.hadoop.mapreduce.lib.output.{FileOutputFormat => NewFileOutputFormat}
 
-import com.clearspring.analytics.stream.cardinality.HyperLogLog
-
 // SparkHadoopWriter and SparkHadoopMapReduceUtil are actually source files defined in Spark.
 import org.apache.hadoop.mapred.SparkHadoopWriter
 import org.apache.hadoop.mapreduce.SparkHadoopMapReduceUtil
+
 import org.apache.spark._
+import org.apache.spark.Partitioner.defaultPartitioner
 import org.apache.spark.SparkContext._
 import org.apache.spark.partial.{BoundedDouble, PartialResult}
-import org.apache.spark.Partitioner.defaultPartitioner
 import org.apache.spark.util.SerializableHyperLogLog
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala
index f270c1ac217575e9caa4a42eec24403b20a2617b..5f03d7d650a30dae1972ae28932a92f31904d3a5 100644
--- a/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala
@@ -17,14 +17,15 @@
 
 package org.apache.spark.rdd
 
+import java.io._
+
+import scala.Serializable
+import scala.collection.Map
 import scala.collection.immutable.NumericRange
 import scala.collection.mutable.ArrayBuffer
-import scala.collection.Map
 import scala.reflect.ClassTag
 
 import org.apache.spark._
-import java.io._
-import scala.Serializable
 import org.apache.spark.serializer.JavaSerializer
 import org.apache.spark.util.Utils
 
diff --git a/core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala
index ea8885b36e3aecd1cab9f81a64b6989547538843..b0440ca7f32cf553163fcf075a4b272cf5d30843 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala
@@ -19,8 +19,7 @@ package org.apache.spark.rdd
 
 import scala.reflect.ClassTag
 
-import org.apache.spark.{NarrowDependency, SparkEnv, Partition, TaskContext}
-
+import org.apache.spark.{NarrowDependency, Partition, TaskContext}
 
 class PartitionPruningRDDPartition(idx: Int, val parentSplit: Partition) extends Partition {
   override val index = idx
diff --git a/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala
index f4364329a3a71c11843c7c574c1ead063ed841d0..a84357b38414e9016c7665c00ffea97cf8deb2ca 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala
@@ -17,10 +17,11 @@
 
 package org.apache.spark.rdd
 
+import java.io.{IOException, ObjectOutputStream}
+
 import scala.reflect.ClassTag
-import java.io.{ObjectOutputStream, IOException}
-import org.apache.spark.{TaskContext, OneToOneDependency, SparkContext, Partition}
 
+import org.apache.spark.{OneToOneDependency, Partition, SparkContext, TaskContext}
 
 /**
  * Class representing partitions of PartitionerAwareUnionRDD, which maintains the list of
diff --git a/core/src/main/scala/org/apache/spark/rdd/PartitionwiseSampledRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PartitionwiseSampledRDD.scala
index a74309d861318a1bd2dfcb5d338a3fb13dc5a32f..ce4c0d382baab6d9ebaf2c8319990736b1d39cb0 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PartitionwiseSampledRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PartitionwiseSampledRDD.scala
@@ -21,7 +21,7 @@ import java.util.Random
 
 import scala.reflect.ClassTag
 
-import org.apache.spark.{TaskContext, Partition}
+import org.apache.spark.{Partition, TaskContext}
 import org.apache.spark.util.random.RandomSampler
 
 private[spark]
diff --git a/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala
index 8ef919c4b58cbed1e2704a56a5082578ab092c46..abd4414e81f5c8a7b61ae746e1fdbe26ba70c7bc 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala
@@ -20,14 +20,13 @@ package org.apache.spark.rdd
 import java.io.PrintWriter
 import java.util.StringTokenizer
 
-import scala.collection.Map
 import scala.collection.JavaConversions._
+import scala.collection.Map
 import scala.collection.mutable.ArrayBuffer
 import scala.io.Source
 import scala.reflect.ClassTag
 
-import org.apache.spark.{SparkEnv, Partition, TaskContext}
-
+import org.apache.spark.{Partition, SparkEnv, TaskContext}
 
 /**
  * An RDD that pipes the contents of each parent partition through an external command
diff --git a/core/src/main/scala/org/apache/spark/rdd/RDD.scala b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
index d4fc28f55137b6a1c39d6635784f59d37010296e..50320f40350cdb26c3c7c415ef7d7c8261c40892 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
@@ -22,30 +22,27 @@ import java.util.Random
 import scala.collection.Map
 import scala.collection.JavaConversions.mapAsScalaMap
 import scala.collection.mutable.ArrayBuffer
-
 import scala.reflect.{classTag, ClassTag}
 
+import com.clearspring.analytics.stream.cardinality.HyperLogLog
+import it.unimi.dsi.fastutil.objects.{Object2LongOpenHashMap => OLMap}
 import org.apache.hadoop.io.BytesWritable
 import org.apache.hadoop.io.compress.CompressionCodec
 import org.apache.hadoop.io.NullWritable
 import org.apache.hadoop.io.Text
 import org.apache.hadoop.mapred.TextOutputFormat
 
-import it.unimi.dsi.fastutil.objects.{Object2LongOpenHashMap => OLMap}
-import com.clearspring.analytics.stream.cardinality.HyperLogLog
-
+import org.apache.spark._
 import org.apache.spark.Partitioner._
+import org.apache.spark.SparkContext._
 import org.apache.spark.api.java.JavaRDD
 import org.apache.spark.partial.BoundedDouble
 import org.apache.spark.partial.CountEvaluator
 import org.apache.spark.partial.GroupedCountEvaluator
 import org.apache.spark.partial.PartialResult
 import org.apache.spark.storage.StorageLevel
-import org.apache.spark.util.{Utils, BoundedPriorityQueue, SerializableHyperLogLog}
-
-import org.apache.spark.SparkContext._
-import org.apache.spark._
-import org.apache.spark.util.random.{PoissonSampler, BernoulliSampler}
+import org.apache.spark.util.{BoundedPriorityQueue, SerializableHyperLogLog, Utils}
+import org.apache.spark.util.random.{BernoulliSampler, PoissonSampler}
 
 /**
  * A Resilient Distributed Dataset (RDD), the basic abstraction in Spark. Represents an immutable,
diff --git a/core/src/main/scala/org/apache/spark/rdd/RDDCheckpointData.scala b/core/src/main/scala/org/apache/spark/rdd/RDDCheckpointData.scala
index 73e8769c0981d75b416f51cde6446bee91869d88..953f0555e57c53324ebc4a25366511016cdd9b55 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDDCheckpointData.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDDCheckpointData.scala
@@ -20,9 +20,8 @@ package org.apache.spark.rdd
 import scala.reflect.ClassTag
 
 import org.apache.hadoop.fs.Path
-import org.apache.hadoop.conf.Configuration
 
-import org.apache.spark.{SerializableWritable, Partition, SparkException, Logging}
+import org.apache.spark.{Logging, Partition, SerializableWritable, SparkException}
 import org.apache.spark.scheduler.{ResultTask, ShuffleMapTask}
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala b/core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala
index 08534b6f1db3e79133ce4ed2a35cc45643b474a2..b50307cfa49b7eac6d33a5dc16a07e1bd9bc918a 100644
--- a/core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala
@@ -17,9 +17,10 @@
 
 package org.apache.spark.rdd
 
-import scala.reflect.ClassTag
 import java.util.Random
 
+import scala.reflect.ClassTag
+
 import cern.jet.random.Poisson
 import cern.jet.random.engine.DRand
 
diff --git a/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala
index c9b4c768a98b405cdab7298b6c400fee8c597599..7df9a2960d8a59aa7a46f271c3db808596d35e95 100644
--- a/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala
@@ -16,15 +16,15 @@
  */
 package org.apache.spark.rdd
 
-import scala.reflect.{ ClassTag, classTag}
+import scala.reflect.{ClassTag, classTag}
 
+import org.apache.hadoop.io.Writable
+import org.apache.hadoop.io.compress.CompressionCodec
 import org.apache.hadoop.mapred.JobConf
 import org.apache.hadoop.mapred.SequenceFileOutputFormat
-import org.apache.hadoop.io.compress.CompressionCodec
-import org.apache.hadoop.io.Writable
 
-import org.apache.spark.SparkContext._
 import org.apache.spark.Logging
+import org.apache.spark.SparkContext._
 
 /**
  * Extra functions available on RDDs of (key, value) pairs to create a Hadoop SequenceFile,
diff --git a/core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala
index 0ccb309d0d9d56a8fd5a7f762632ca218b488b3d..0bbda25a905cd6b26cbd58daefc3c74e72e7eac4 100644
--- a/core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala
@@ -19,8 +19,7 @@ package org.apache.spark.rdd
 
 import scala.reflect.ClassTag
 
-import org.apache.spark.{Dependency, Partition, Partitioner, ShuffleDependency,
-  SparkEnv, TaskContext}
+import org.apache.spark.{Dependency, Partition, Partitioner, ShuffleDependency, SparkEnv, TaskContext}
 
 private[spark] class ShuffledRDDPartition(val idx: Int) extends Partition {
   override val index = idx
diff --git a/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala
index 4f90c7d3d68ab0dbd55c335756e861a92ae16170..5fe9f363db4537020737a4e87805dac2cc8188d8 100644
--- a/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala
@@ -23,14 +23,13 @@ import scala.collection.JavaConversions._
 import scala.collection.mutable.ArrayBuffer
 import scala.reflect.ClassTag
 
-import org.apache.spark.Partitioner
 import org.apache.spark.Dependency
-import org.apache.spark.TaskContext
+import org.apache.spark.OneToOneDependency
 import org.apache.spark.Partition
-import org.apache.spark.SparkEnv
+import org.apache.spark.Partitioner
 import org.apache.spark.ShuffleDependency
-import org.apache.spark.OneToOneDependency
-
+import org.apache.spark.SparkEnv
+import org.apache.spark.TaskContext
 
 /**
  * An optimized version of cogroup for set difference/subtraction.
diff --git a/core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala b/core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala
index 08a41ac5583d52eab04eb1c380738f03708627dd..a4470307520966279c8f1ff0a2f4e54a20a01ed6 100644
--- a/core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala
@@ -17,12 +17,12 @@
 
 package org.apache.spark.rdd
 
+import java.io.{IOException, ObjectOutputStream}
+
 import scala.collection.mutable.ArrayBuffer
 import scala.reflect.ClassTag
 
-import org.apache.spark.{Dependency, RangeDependency, SparkContext, Partition, TaskContext}
-
-import java.io.{ObjectOutputStream, IOException}
+import org.apache.spark.{Dependency, Partition, RangeDependency, SparkContext, TaskContext}
 
 private[spark] class UnionPartition[T: ClassTag](idx: Int, rdd: RDD[T], splitIndex: Int)
   extends Partition {
diff --git a/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala
index 83be3c6eb40c480171c1b13369eec1ee71ebaba1..b56643444aa4012e2aefa62d16ae013817777f6c 100644
--- a/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala
@@ -17,10 +17,12 @@
 
 package org.apache.spark.rdd
 
-import org.apache.spark.{OneToOneDependency, SparkContext, Partition, TaskContext}
-import java.io.{ObjectOutputStream, IOException}
+import java.io.{IOException, ObjectOutputStream}
+
 import scala.reflect.ClassTag
 
+import org.apache.spark.{OneToOneDependency, Partition, SparkContext, TaskContext}
+
 private[spark] class ZippedPartitionsPartition(
     idx: Int,
     @transient rdds: Seq[RDD[_]],
diff --git a/core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala
index fb5b070c18d433d8beefd0f620648c8e096226cb..2119e76f0e032650b7c0f73f73cd42c7038b03d2 100644
--- a/core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala
@@ -17,12 +17,12 @@
 
 package org.apache.spark.rdd
 
-import org.apache.spark.{OneToOneDependency, SparkContext, Partition, TaskContext}
-
-import java.io.{ObjectOutputStream, IOException}
+import java.io.{IOException, ObjectOutputStream}
 
 import scala.reflect.ClassTag
 
+import org.apache.spark.{OneToOneDependency, Partition, SparkContext, TaskContext}
+
 private[spark] class ZippedPartition[T: ClassTag, U: ClassTag](
     idx: Int,
     @transient rdd1: RDD[T],
diff --git a/core/src/main/scala/org/apache/spark/rdd/ZippedWithIndexRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ZippedWithIndexRDD.scala
index 38dc114d80812f60a61ed6b29910a1b816dda835..e2c301603b4a5e59a422e4c54817a3d3fb68bf51 100644
--- a/core/src/main/scala/org/apache/spark/rdd/ZippedWithIndexRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ZippedWithIndexRDD.scala
@@ -19,7 +19,7 @@ package org.apache.spark.rdd
 
 import scala.reflect.ClassTag
 
-import org.apache.spark.{TaskContext, Partition}
+import org.apache.spark.{Partition, TaskContext}
 import org.apache.spark.util.Utils
 
 private[spark]
diff --git a/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala b/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala
index 0b04607d019a8431b1ab1aa07f3bda77f3421151..9257f48559c9e88fcc6d030b00d865b2ad6d7ffe 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala
@@ -17,10 +17,10 @@
 
 package org.apache.spark.scheduler
 
-import org.apache.spark.TaskContext
-
 import java.util.Properties
 
+import org.apache.spark.TaskContext
+
 /**
  * Tracks information about an active job in the DAGScheduler.
  */
diff --git a/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
index 80211541a6a633ec5b6593dd6d6c09dcde12e697..729f518b89c06013e2fb3213c61b9997b81387a5 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
@@ -28,9 +28,9 @@ import scala.reflect.ClassTag
 import akka.actor._
 
 import org.apache.spark._
-import org.apache.spark.rdd.RDD
 import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.partial.{ApproximateActionListener, ApproximateEvaluator, PartialResult}
+import org.apache.spark.rdd.RDD
 import org.apache.spark.storage.{BlockId, BlockManager, BlockManagerMaster, RDDBlockId}
 import org.apache.spark.util.{MetadataCleaner, MetadataCleanerType, TimeStampedHashMap}
 
diff --git a/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
index add11876130b18eabf6cc8404f223cef31bafa9b..39cd98e2d74e4e1dc862a80201a3f47835259a24 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
@@ -22,8 +22,8 @@ import java.util.Properties
 import scala.collection.mutable.Map
 
 import org.apache.spark._
-import org.apache.spark.rdd.RDD
 import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.rdd.RDD
 
 /**
  * Types of events that can be handled by the DAGScheduler. The DAGScheduler uses an event queue
diff --git a/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala
index 7b5c0e29ad84043474e443db395c6d98b92536f0..b52fe2410abdeab499d69f12f1f752020d5d6eaa 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala
@@ -19,8 +19,8 @@ package org.apache.spark.scheduler
 
 import com.codahale.metrics.{Gauge,MetricRegistry}
 
-import org.apache.spark.metrics.source.Source
 import org.apache.spark.SparkContext
+import org.apache.spark.metrics.source.Source
 
 private[spark] class DAGSchedulerSource(val dagScheduler: DAGScheduler, sc: SparkContext)
     extends Source {
diff --git a/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala b/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala
index 23447f1bbf852956858768950cd5c395cccb4bb6..5555585c8b4cdfe6ff408d87660b6c7b1be533ea 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala
@@ -17,17 +17,17 @@
 
 package org.apache.spark.scheduler
 
-import org.apache.spark.{Logging, SparkEnv}
-import org.apache.spark.deploy.SparkHadoopUtil
+import scala.collection.JavaConversions._
 import scala.collection.immutable.Set
+import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
+
+import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.mapred.{FileInputFormat, JobConf}
-import org.apache.hadoop.security.UserGroupInformation
-import org.apache.hadoop.util.ReflectionUtils
 import org.apache.hadoop.mapreduce.Job
-import org.apache.hadoop.conf.Configuration
-import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
-import scala.collection.JavaConversions._
+import org.apache.hadoop.util.ReflectionUtils
 
+import org.apache.spark.Logging
+import org.apache.spark.deploy.SparkHadoopUtil
 
 /**
  * Parses and holds information about inputFormat (and files) specified as a parameter.
diff --git a/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala b/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
index b909b66a5de768d6bbe430bbefd17518d6078b67..9d75d7c4ad69ac6953b6eb7db5323df45e26899b 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.scheduler
 
-import java.io.{IOException, File, FileNotFoundException, PrintWriter}
+import java.io.{File, FileNotFoundException, IOException, PrintWriter}
 import java.text.SimpleDateFormat
 import java.util.{Date, Properties}
 import java.util.concurrent.LinkedBlockingQueue
@@ -25,8 +25,8 @@ import java.util.concurrent.LinkedBlockingQueue
 import scala.collection.mutable.{HashMap, HashSet, ListBuffer}
 
 import org.apache.spark._
-import org.apache.spark.rdd.RDD
 import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.rdd.RDD
 import org.apache.spark.storage.StorageLevel
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala b/core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala
index 1c61687f280064998b704c4d96422af4e5cd2057..d3f63ff92ac6f480483f4b6acbda488c728e67a8 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala
@@ -17,8 +17,9 @@
 
 package org.apache.spark.scheduler
 
+import java.io.{Externalizable, ObjectInput, ObjectOutput}
+
 import org.apache.spark.storage.BlockManagerId
-import java.io.{ObjectOutput, ObjectInput, Externalizable}
 
 /**
  * Result returned by a ShuffleMapTask to a scheduler. Includes the block manager address that the
diff --git a/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala b/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
index 77b1682b3e47c28a56b7eaea777d240f3b8164ae..3fc6cc9850febbf1207a27ebcd6421b7b99f3d1c 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
@@ -23,7 +23,7 @@ import java.util.zip.{GZIPInputStream, GZIPOutputStream}
 import org.apache.spark._
 import org.apache.spark.rdd.RDD
 import org.apache.spark.rdd.RDDCheckpointData
-import org.apache.spark.util.{MetadataCleanerType, MetadataCleaner, TimeStampedHashMap}
+import org.apache.spark.util.{MetadataCleaner, MetadataCleanerType, TimeStampedHashMap}
 
 private[spark] object ResultTask {
 
diff --git a/core/src/main/scala/org/apache/spark/scheduler/Schedulable.scala b/core/src/main/scala/org/apache/spark/scheduler/Schedulable.scala
index d573e125a33d1af5eec279b8d84c5c74c63d1f73..ed24eb6a549dd274a46f60d4fef84b5f2749018b 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/Schedulable.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/Schedulable.scala
@@ -17,9 +17,10 @@
 
 package org.apache.spark.scheduler
 
+import scala.collection.mutable.ArrayBuffer
+
 import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
 
-import scala.collection.mutable.ArrayBuffer
 /**
  * An interface for schedulable entities.
  * there are two type of Schedulable entities(Pools and TaskSetManagers)
diff --git a/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala b/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala
index a546193d5b49ae6f56b28311c0b1c138aa3b8385..e4eced383c3a5c57fe9047b8630ee186360d5431 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala
@@ -20,10 +20,10 @@ package org.apache.spark.scheduler
 import java.io.{FileInputStream, InputStream}
 import java.util.{NoSuchElementException, Properties}
 
-import org.apache.spark.{SparkConf, Logging}
-
 import scala.xml.XML
 
+import org.apache.spark.{Logging, SparkConf}
+
 /**
  * An interface to build Schedulable tree
  * buildPools: build the tree nodes(pools)
diff --git a/core/src/main/scala/org/apache/spark/scheduler/SchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/SchedulerBackend.scala
index 02bdbba825781968fe672ca969e9919be7c3a319..eefc8c232b5646d26e9c6ed46bce68ccaf46af7c 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/SchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/SchedulerBackend.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.scheduler
 
-import org.apache.spark.SparkContext
-
 /**
  * A backend interface for scheduling systems that allows plugging in different ones under
  * ClusterScheduler. We assume a Mesos-like model where the application gets resource offers as
diff --git a/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala b/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala
index a37ead563271a9b2171b1095eb7589151abe1777..77789031f464a8af20881433790422bd77812a5b 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala
@@ -24,11 +24,10 @@ import scala.collection.mutable.HashMap
 
 import org.apache.spark._
 import org.apache.spark.executor.ShuffleWriteMetrics
-import org.apache.spark.storage._
-import org.apache.spark.util.{MetadataCleanerType, TimeStampedHashMap, MetadataCleaner}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.rdd.RDDCheckpointData
-
+import org.apache.spark.storage._
+import org.apache.spark.util.{MetadataCleaner, MetadataCleanerType, TimeStampedHashMap}
 
 private[spark] object ShuffleMapTask {
 
diff --git a/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala b/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
index 129153c732d9a71f8f734cf7d7ca4edc6b55967a..9590c03f10632f44382497c0393ea92fccb11720 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
@@ -18,9 +18,10 @@
 package org.apache.spark.scheduler
 
 import java.util.Properties
-import org.apache.spark.util.{Utils, Distribution}
+
 import org.apache.spark.{Logging, TaskEndReason}
 import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.util.{Distribution, Utils}
 
 sealed trait SparkListenerEvents
 
diff --git a/core/src/main/scala/org/apache/spark/scheduler/Task.scala b/core/src/main/scala/org/apache/spark/scheduler/Task.scala
index 69b42e86eae3e3dd4c737355d143b10bb18185c3..b85b4a50cd93a17780c35b809d4cbc56662c22ae 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/Task.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/Task.scala
@@ -29,7 +29,6 @@ import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.serializer.SerializerInstance
 import org.apache.spark.util.ByteBufferInputStream
 
-
 /**
  * A unit of execution. We have two kinds of Task's in Spark:
  * - [[org.apache.spark.scheduler.ShuffleMapTask]]
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskDescription.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskDescription.scala
index 5190d234d4ee531926086c8ac3673d61806309c8..1481d70db42e1dc6017b21276be44a394cd1e27c 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskDescription.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskDescription.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.scheduler
 
 import java.nio.ByteBuffer
+
 import org.apache.spark.util.SerializableBuffer
 
 private[spark] class TaskDescription(
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskInfo.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskInfo.scala
index 91c27d7b8e9d7aeda5c0e33d5a1ef0a47cbed91c..6183b125def99667f8f02ae389c5682e96794b49 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskInfo.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskInfo.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.scheduler
 
-import org.apache.spark.util.Utils
-
 /**
  * Information about a running task attempt inside a TaskSet.
  */
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskLocality.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskLocality.scala
index 35de13c3851876123c6d2b85371ba64851d89af0..ea3229b75be36adb96d11afc5ef9ce97e32e929c 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskLocality.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskLocality.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark.scheduler
 
-
 private[spark] object TaskLocality extends Enumeration {
   // process local is expected to be used ONLY within tasksetmanager for now.
   val PROCESS_LOCAL, NODE_LOCAL, RACK_LOCAL, ANY = Value
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala
index 5724ec9d1b4d7aaa2abc5022f5daf8481bc03d0a..d49d8fb88700782017d724f79691d3ce926e03b9 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala
@@ -18,13 +18,14 @@
 package org.apache.spark.scheduler
 
 import java.io._
+import java.nio.ByteBuffer
 
 import scala.collection.mutable.Map
+
+import org.apache.spark.SparkEnv
 import org.apache.spark.executor.TaskMetrics
-import org.apache.spark.{SparkEnv}
-import java.nio.ByteBuffer
-import org.apache.spark.util.Utils
 import org.apache.spark.storage.BlockId
+import org.apache.spark.util.Utils
 
 // Task result. Also contains updates to accumulator variables.
 private[spark] sealed trait TaskResult[T]
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskResultGetter.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskResultGetter.scala
index bdec08e968a4528a87ffb8d1b7641f4d5af8bdb8..cb4ad4ae9350cedaacc8713bc4db22f767c605e9 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskResultGetter.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskResultGetter.scala
@@ -18,7 +18,6 @@
 package org.apache.spark.scheduler
 
 import java.nio.ByteBuffer
-import java.util.concurrent.{LinkedBlockingDeque, ThreadFactory, ThreadPoolExecutor, TimeUnit}
 
 import org.apache.spark._
 import org.apache.spark.TaskState.TaskState
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
index 5b525155e9f6229f81b2bddab892e9ad65ba1653..8df37c247d0d4601504ea8d2c25fb80f80343394 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
@@ -18,13 +18,13 @@
 package org.apache.spark.scheduler
 
 import java.nio.ByteBuffer
-import java.util.concurrent.atomic.AtomicLong
 import java.util.{TimerTask, Timer}
+import java.util.concurrent.atomic.AtomicLong
 
+import scala.concurrent.duration._
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.HashMap
 import scala.collection.mutable.HashSet
-import scala.concurrent.duration._
 
 import org.apache.spark._
 import org.apache.spark.TaskState.TaskState
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
index 21b2ff1682b78635b7ea9e6cf8a76a5182d753cf..1a4b7e599c01eafe094d96471bacefd15316e458 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
@@ -26,13 +26,11 @@ import scala.collection.mutable.HashSet
 import scala.math.max
 import scala.math.min
 
-import org.apache.spark.{ExceptionFailure, ExecutorLostFailure, FetchFailed, Logging, Resubmitted,
-  SparkEnv, Success, TaskEndReason, TaskKilled, TaskResultLost, TaskState}
+import org.apache.spark.{ExceptionFailure, ExecutorLostFailure, FetchFailed, Logging, Resubmitted, SparkEnv, Success, TaskEndReason, TaskKilled, TaskResultLost, TaskState}
 import org.apache.spark.TaskState.TaskState
 import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.util.{Clock, SystemClock}
 
-
 /**
  * Schedules the tasks within a single TaskSet in the ClusterScheduler. This class keeps track of
  * each task, retries tasks if they fail (up to a limited number of times), and
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala
index 53316dae2a6c81d625184fb991bd51d8967f4889..4a9a1659d825473377d3ff67272ec3e2cc16e2b6 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala
@@ -21,8 +21,7 @@ import java.nio.ByteBuffer
 
 import org.apache.spark.TaskState.TaskState
 import org.apache.spark.scheduler.TaskDescription
-import org.apache.spark.util.{Utils, SerializableBuffer}
-
+import org.apache.spark.util.{SerializableBuffer, Utils}
 
 private[spark] sealed trait CoarseGrainedClusterMessage extends Serializable
 
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
index 78204103a9bbdac62b26cbbac0972d33ebbceb65..379e02eb9a437ee76eb920a3c370eb15ac84a304 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
@@ -27,10 +27,8 @@ import akka.actor._
 import akka.pattern.ask
 import akka.remote.{DisassociatedEvent, RemotingLifecycleEvent}
 
-import org.apache.spark.{SparkException, Logging, TaskState}
 import org.apache.spark.{Logging, SparkException, TaskState}
-import org.apache.spark.scheduler.{TaskSchedulerImpl, SchedulerBackend, SlaveLost, TaskDescription,
-  WorkerOffer}
+import org.apache.spark.scheduler.{SchedulerBackend, SlaveLost, TaskDescription, TaskSchedulerImpl, WorkerOffer}
 import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._
 import org.apache.spark.util.{AkkaUtils, Utils}
 
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
index 04f35cca0826294ef3731d5e5523b3daa2b7b1cb..ee4b65e312abc8cbade3d1ff80f55f978c41a423 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
@@ -17,11 +17,9 @@
 
 package org.apache.spark.scheduler.cluster
 
-import scala.collection.mutable.HashMap
-
 import org.apache.spark.{Logging, SparkContext}
-import org.apache.spark.deploy.client.{AppClient, AppClientListener}
 import org.apache.spark.deploy.{Command, ApplicationDescription}
+import org.apache.spark.deploy.client.{AppClient, AppClientListener}
 import org.apache.spark.scheduler.{ExecutorExited, ExecutorLossReason, SlaveLost, TaskSchedulerImpl}
 import org.apache.spark.util.Utils
 
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala
index 4401f6df47421ae02163c0f9f84a3f94f99c3f4b..28b019d9fd4954afcc7f3891720e65556354531a 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala
@@ -18,18 +18,17 @@
 package org.apache.spark.scheduler.cluster.mesos
 
 import java.io.File
-import java.util.{ArrayList => JArrayList, List => JList}
+import java.util.{List => JList}
 import java.util.Collections
 
-import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
 import scala.collection.JavaConversions._
+import scala.collection.mutable.{HashMap, HashSet}
 
-import com.google.protobuf.ByteString
 import org.apache.mesos.{Scheduler => MScheduler}
 import org.apache.mesos._
 import org.apache.mesos.Protos.{TaskInfo => MesosTaskInfo, TaskState => MesosTaskState, _}
 
-import org.apache.spark.{SparkException, Logging, SparkContext, TaskState}
+import org.apache.spark.{Logging, SparkContext, SparkException}
 import org.apache.spark.scheduler.TaskSchedulerImpl
 import org.apache.spark.scheduler.cluster.CoarseGrainedSchedulerBackend
 
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala
index fef291eea0257cad91eda429e17cdd6887ede2b8..c576beb0c0d38bddc496c8e8c611afe4e4eea5dd 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala
@@ -21,17 +21,16 @@ import java.io.File
 import java.util.{ArrayList => JArrayList, List => JList}
 import java.util.Collections
 
-import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
 import scala.collection.JavaConversions._
+import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
 
 import com.google.protobuf.ByteString
 import org.apache.mesos.{Scheduler => MScheduler}
 import org.apache.mesos._
 import org.apache.mesos.Protos.{TaskInfo => MesosTaskInfo, TaskState => MesosTaskState, _}
 
-import org.apache.spark.{Logging, SparkException, SparkContext, TaskState}
-import org.apache.spark.scheduler.{ExecutorExited, ExecutorLossReason, SchedulerBackend, SlaveLost,
-  TaskDescription, TaskSchedulerImpl, WorkerOffer}
+import org.apache.spark.{Logging, SparkContext, SparkException, TaskState}
+import org.apache.spark.scheduler.{ExecutorExited, ExecutorLossReason, SchedulerBackend, SlaveLost, TaskDescription, TaskSchedulerImpl, WorkerOffer}
 import org.apache.spark.util.Utils
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/scheduler/local/LocalBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/local/LocalBackend.scala
index 897d47a9ad981951e756d58f46545d9c8f42ef4c..50f7e79e97dd842eea05825cceb103861b415430 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/local/LocalBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/local/LocalBackend.scala
@@ -21,7 +21,7 @@ import java.nio.ByteBuffer
 
 import akka.actor.{Actor, ActorRef, Props}
 
-import org.apache.spark.{Logging, SparkContext, SparkEnv, TaskState}
+import org.apache.spark.{Logging, SparkEnv, TaskState}
 import org.apache.spark.TaskState.TaskState
 import org.apache.spark.executor.{Executor, ExecutorBackend}
 import org.apache.spark.scheduler.{SchedulerBackend, TaskSchedulerImpl, WorkerOffer}
diff --git a/core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala b/core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala
index 5d3d43623d9d2368b7527c12dcf165f012f0943c..33c1705ad7c5832184f225a13cc812b6164114b9 100644
--- a/core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala
@@ -20,8 +20,8 @@ package org.apache.spark.serializer
 import java.io._
 import java.nio.ByteBuffer
 
-import org.apache.spark.util.ByteBufferInputStream
 import org.apache.spark.SparkConf
+import org.apache.spark.util.ByteBufferInputStream
 
 private[spark] class JavaSerializationStream(out: OutputStream) extends SerializationStream {
   val objOut = new ObjectOutputStream(out)
diff --git a/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala b/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
index 2d0b25538505c0f0bdb986867768be9ca65a2a7b..920490f9d0d616fa0a5ab5ce226436f2bf29bcd5 100644
--- a/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
@@ -17,13 +17,13 @@
 
 package org.apache.spark.serializer
 
-import java.nio.ByteBuffer
 import java.io.{EOFException, InputStream, OutputStream}
+import java.nio.ByteBuffer
 
-import com.esotericsoftware.kryo.serializers.{JavaSerializer => KryoJavaSerializer}
-import com.esotericsoftware.kryo.{KryoException, Kryo}
+import com.esotericsoftware.kryo.{Kryo, KryoException}
 import com.esotericsoftware.kryo.io.{Input => KryoInput, Output => KryoOutput}
-import com.twitter.chill.{EmptyScalaKryoInstantiator, AllScalaRegistrar}
+import com.esotericsoftware.kryo.serializers.{JavaSerializer => KryoJavaSerializer}
+import com.twitter.chill.{AllScalaRegistrar, EmptyScalaKryoInstantiator}
 
 import org.apache.spark._
 import org.apache.spark.broadcast.HttpBroadcast
diff --git a/core/src/main/scala/org/apache/spark/serializer/Serializer.scala b/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
index a38a2b59dbc23b26bad167ba99632f0856fd84fa..16677ab54be04fb3ad5e5af4adf0ac8ea41e28b2 100644
--- a/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
@@ -22,8 +22,7 @@ import java.nio.ByteBuffer
 
 import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream
 
-import org.apache.spark.util.{NextIterator, ByteBufferInputStream}
-
+import org.apache.spark.util.{ByteBufferInputStream, NextIterator}
 
 /**
  * A serializer. Because some serialization libraries are not thread safe, this class is used to
diff --git a/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala b/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala
index 36a37af4f821daddfcf0ac84db02a266f3ee09c5..65ac0155f45e72f5c9ca55251ffbd81fde2f929a 100644
--- a/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala
@@ -18,8 +18,8 @@
 package org.apache.spark.serializer
 
 import java.util.concurrent.ConcurrentHashMap
-import org.apache.spark.SparkConf
 
+import org.apache.spark.SparkConf
 
 /**
  * A service that returns a serializer object given the serializer's class name. If a previous
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala b/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala
index aa62ab5aba1c2a7c3b972c6e9676feacf7191aea..925022e7fe6fb96705d0483203b161b16d113ee8 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark.storage
 
-import java.nio.ByteBuffer
 import java.util.concurrent.LinkedBlockingQueue
 
 import scala.collection.mutable.ArrayBuffer
@@ -26,15 +25,13 @@ import scala.collection.mutable.Queue
 
 import io.netty.buffer.ByteBuf
 
-import org.apache.spark.Logging
-import org.apache.spark.SparkException
+import org.apache.spark.{Logging, SparkException}
 import org.apache.spark.network.BufferMessage
 import org.apache.spark.network.ConnectionManagerId
 import org.apache.spark.network.netty.ShuffleCopier
 import org.apache.spark.serializer.Serializer
 import org.apache.spark.util.Utils
 
-
 /**
  * A block fetcher iterator interface. There are two implementations:
  *
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
index 780a3a15dd15b3724348ccaacea0c9a9d0cc8656..a734ddc1ef7027b1ba91d1c6e66e600048389716 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
@@ -20,24 +20,21 @@ package org.apache.spark.storage
 import java.io.{File, InputStream, OutputStream}
 import java.nio.{ByteBuffer, MappedByteBuffer}
 
-import scala.collection.mutable.{HashMap, ArrayBuffer}
-import scala.util.Random
-
-import akka.actor.{ActorSystem, Cancellable, Props}
+import scala.collection.mutable.{ArrayBuffer, HashMap}
 import scala.concurrent.{Await, Future}
-import scala.concurrent.duration.Duration
 import scala.concurrent.duration._
+import scala.util.Random
 
+import akka.actor.{ActorSystem, Cancellable, Props}
 import it.unimi.dsi.fastutil.io.{FastBufferedOutputStream, FastByteArrayOutputStream}
+import sun.nio.ch.DirectBuffer
 
-import org.apache.spark.{SparkConf, Logging, SparkEnv, SparkException}
+import org.apache.spark.{Logging, SparkConf, SparkEnv, SparkException}
 import org.apache.spark.io.CompressionCodec
 import org.apache.spark.network._
 import org.apache.spark.serializer.Serializer
 import org.apache.spark.util._
 
-import sun.nio.ch.DirectBuffer
-
 private[spark] class BlockManager(
     executorId: String,
     actorSystem: ActorSystem,
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala
index 74207f59af17078fea9a7997d5d12456397812aa..98cd6e68fa7241dec3017d85153d57126196ca4e 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala
@@ -19,6 +19,7 @@ package org.apache.spark.storage
 
 import java.io.{Externalizable, IOException, ObjectInput, ObjectOutput}
 import java.util.concurrent.ConcurrentHashMap
+
 import org.apache.spark.util.Utils
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
index c54e4f2664753c8a8f3f79b0a9e4dd2c98604612..e531467cccb40567fd3adfaa7388874c21ff63d1 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
@@ -23,7 +23,7 @@ import scala.concurrent.ExecutionContext.Implicits.global
 import akka.actor._
 import akka.pattern.ask
 
-import org.apache.spark.{SparkConf, Logging, SparkException}
+import org.apache.spark.{Logging, SparkConf, SparkException}
 import org.apache.spark.storage.BlockManagerMessages._
 import org.apache.spark.util.AkkaUtils
 
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala
index 893418fb8cad9390d967f95f6fda6bd4ab0e91c0..a999d76a326a68e12b59f1ffe49aa4dfa5bc7f05 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala
@@ -27,7 +27,7 @@ import scala.concurrent.duration._
 import akka.actor.{Actor, ActorRef, Cancellable}
 import akka.pattern.ask
 
-import org.apache.spark.{SparkConf, Logging, SparkException}
+import org.apache.spark.{Logging, SparkConf, SparkException}
 import org.apache.spark.storage.BlockManagerMessages._
 import org.apache.spark.util.{AkkaUtils, Utils}
 
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala
index 45f51da2885481d3e4e23e97384734c6cd5245bc..bbb9529b5a0cab9a7fb7100a292e87050297b799 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala
@@ -21,7 +21,6 @@ import java.io.{Externalizable, ObjectInput, ObjectOutput}
 
 import akka.actor.ActorRef
 
-
 private[storage] object BlockManagerMessages {
   //////////////////////////////////////////////////////////////////////////////////
   // Messages from the master to slaves.
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala
index 3a65e5573383451bdc3d3a2a1f3e4d94a1c407f9..bcfb82d3c733680ef27a3ce79b3e35b52038bbe8 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala
@@ -21,7 +21,6 @@ import akka.actor.Actor
 
 import org.apache.spark.storage.BlockManagerMessages._
 
-
 /**
  * An actor to take commands from the master to execute options. For example,
  * this is used to remove blocks from the slave's BlockManager.
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala
index 7cf754fb204c2d1e894debe79c1d382952d90ae0..687586490abfe413f2aeb37f0f258a58cdefa924 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala
@@ -19,9 +19,8 @@ package org.apache.spark.storage
 
 import com.codahale.metrics.{Gauge,MetricRegistry}
 
-import org.apache.spark.metrics.source.Source
 import org.apache.spark.SparkContext
-
+import org.apache.spark.metrics.source.Source
 
 private[spark] class BlockManagerSource(val blockManager: BlockManager, sc: SparkContext)
     extends Source {
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala
index 3efe738a08f666a883c6cacb3f23a95873aeb441..c7766a3a65671776786ee6fe9d43c1b12e4cbedf 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala
@@ -19,7 +19,7 @@ package org.apache.spark.storage
 
 import java.nio.ByteBuffer
 
-import org.apache.spark.{Logging}
+import org.apache.spark.Logging
 import org.apache.spark.network._
 import org.apache.spark.util.Utils
 
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockMessage.scala b/core/src/main/scala/org/apache/spark/storage/BlockMessage.scala
index fbafcf79d28339af7cebfe34bc827ff83dcbee43..7168ae18c2615eb8251b7e6cbae9181264142ffc 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockMessage.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockMessage.scala
@@ -19,8 +19,8 @@ package org.apache.spark.storage
 
 import java.nio.ByteBuffer
 
-import scala.collection.mutable.StringBuilder
 import scala.collection.mutable.ArrayBuffer
+import scala.collection.mutable.StringBuilder
 
 import org.apache.spark.network._
 
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockStore.scala b/core/src/main/scala/org/apache/spark/storage/BlockStore.scala
index ea426562402ae4ef4a16223f4c12ea862528ad9b..b047644b88f488c2318cd73b2aefb2b211eb4b4a 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockStore.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.storage
 
 import java.nio.ByteBuffer
+
 import scala.collection.mutable.ArrayBuffer
 
 import org.apache.spark.Logging
diff --git a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
index 5a1e7b44440fdac533ae6256ba61c33d70552b7d..d1f07ddb24bb2a79a1f11d252296f7ec996ed265 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
@@ -27,7 +27,6 @@ import org.apache.spark.Logging
 import org.apache.spark.serializer.Serializer
 import org.apache.spark.util.Utils
 
-
 /**
  * Stores BlockManager blocks on disk.
  */
diff --git a/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala b/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala
index eb5a18521683e2be27d4ae0fbaa2eb499e945a50..18141756518c50b4e5c7d829e13494c53def027b 100644
--- a/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala
@@ -17,10 +17,11 @@
 
 package org.apache.spark.storage
 
-import java.util.LinkedHashMap
-import java.util.concurrent.ArrayBlockingQueue
 import java.nio.ByteBuffer
-import collection.mutable.ArrayBuffer
+import java.util.LinkedHashMap
+
+import scala.collection.mutable.ArrayBuffer
+
 import org.apache.spark.util.{SizeEstimator, Utils}
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/storage/StoragePerfTester.scala b/core/src/main/scala/org/apache/spark/storage/StoragePerfTester.scala
index 40734aab49f93107b512f1affe324baaefb08509..8cea302eb14c333ed254cdf8394acc418b7ea420 100644
--- a/core/src/main/scala/org/apache/spark/storage/StoragePerfTester.scala
+++ b/core/src/main/scala/org/apache/spark/storage/StoragePerfTester.scala
@@ -17,11 +17,11 @@
 
 package org.apache.spark.storage
 
-import java.util.concurrent.atomic.AtomicLong
 import java.util.concurrent.{CountDownLatch, Executors}
+import java.util.concurrent.atomic.AtomicLong
 
-import org.apache.spark.serializer.KryoSerializer
 import org.apache.spark.SparkContext
+import org.apache.spark.serializer.KryoSerializer
 import org.apache.spark.util.Utils
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala b/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala
index 50a0cdb3095cd91adf424f5ad168a18ec75cb4cf..2d88a40fbb3f24b4c09b20f6c79cbb485990bbf1 100644
--- a/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala
+++ b/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark.storage
 
-import org.apache.spark.{SparkContext}
-import BlockManagerMasterActor.BlockStatus
+import org.apache.spark.SparkContext
+import org.apache.spark.storage.BlockManagerMasterActor.BlockStatus
 import org.apache.spark.util.Utils
 
 private[spark]
diff --git a/core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala b/core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala
index 729ba2c550a20bc949803aeff589e9bdd1e2b39c..1d81d006c0b29c7f6931c00f2a6b69194d81085d 100644
--- a/core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala
+++ b/core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala
@@ -17,12 +17,13 @@
 
 package org.apache.spark.storage
 
-import akka.actor._
-
 import java.util.concurrent.ArrayBlockingQueue
+
+import akka.actor._
 import util.Random
+
+import org.apache.spark.SparkConf
 import org.apache.spark.serializer.KryoSerializer
-import org.apache.spark.{SparkConf, SparkContext}
 
 /**
  * This class tests the BlockManager and MemoryStore for thread safety and
diff --git a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
index ade8ba1323b80004af7d335df22698826aac3595..1f048a84cdfb6851958a7a01893709caac84534c 100644
--- a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
@@ -17,21 +17,19 @@
 
 package org.apache.spark.ui
 
+import java.net.InetSocketAddress
 import javax.servlet.http.{HttpServletResponse, HttpServletRequest}
 
 import scala.annotation.tailrec
-import scala.util.{Try, Success, Failure}
+import scala.util.{Failure, Success, Try}
 import scala.xml.Node
 
 import net.liftweb.json.{JValue, pretty, render}
-
-import org.eclipse.jetty.server.{Server, Request, Handler}
-import org.eclipse.jetty.server.handler.{ResourceHandler, HandlerList, ContextHandler, AbstractHandler}
+import org.eclipse.jetty.server.{Handler, Request, Server}
+import org.eclipse.jetty.server.handler.{AbstractHandler, ContextHandler, HandlerList, ResourceHandler}
 import org.eclipse.jetty.util.thread.QueuedThreadPool
 
 import org.apache.spark.Logging
-import java.net.InetSocketAddress
-
 
 /** Utilities for launching a web server using Jetty's HTTP Server class */
 private[spark] object JettyUtils extends Logging {
diff --git a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
index 0196f43d7431b7f6bfc5d12b333f6a7f61118702..af6b65860e0065b818c24745af0d48d9d48e8742 100644
--- a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
@@ -17,16 +17,14 @@
 
 package org.apache.spark.ui
 
-import javax.servlet.http.HttpServletRequest
-
 import org.eclipse.jetty.server.{Handler, Server}
 
 import org.apache.spark.{Logging, SparkContext, SparkEnv}
+import org.apache.spark.ui.JettyUtils._
 import org.apache.spark.ui.env.EnvironmentUI
 import org.apache.spark.ui.exec.ExecutorsUI
-import org.apache.spark.ui.storage.BlockManagerUI
 import org.apache.spark.ui.jobs.JobProgressUI
-import org.apache.spark.ui.JettyUtils._
+import org.apache.spark.ui.storage.BlockManagerUI
 import org.apache.spark.util.Utils
 
 /** Top level user interface for Spark */
diff --git a/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala b/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
index f913ee461b219cfb68f0d09e53c3b24cad16f05d..18d2b5075aa08346f98c5de1f0eed37a18ce834a 100644
--- a/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
+++ b/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
@@ -23,7 +23,6 @@ import org.apache.spark.{SparkConf, SparkContext}
 import org.apache.spark.SparkContext._
 import org.apache.spark.scheduler.SchedulingMode
 
-
 /**
  * Continuously generates jobs that expose various features of the WebUI (internal testing tool).
  *
diff --git a/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala b/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala
index 88f41be8d3dd2df802076b9c642ed6e8cf00a471..9e7cdc88162e8618c3ef73b03924bc9431603193 100644
--- a/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala
@@ -25,11 +25,10 @@ import scala.xml.Node
 
 import org.eclipse.jetty.server.Handler
 
+import org.apache.spark.SparkContext
 import org.apache.spark.ui.JettyUtils._
-import org.apache.spark.ui.UIUtils
 import org.apache.spark.ui.Page.Environment
-import org.apache.spark.SparkContext
-
+import org.apache.spark.ui.UIUtils
 
 private[spark] class EnvironmentUI(sc: SparkContext) {
 
diff --git a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala
index 4e41acf0230f9cd5042ce3d0454da6c623de5667..1f3b7a4c231b6a9d48e578e6b6076a5cdac29782 100644
--- a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala
@@ -26,14 +26,13 @@ import org.eclipse.jetty.server.Handler
 
 import org.apache.spark.{ExceptionFailure, Logging, SparkContext}
 import org.apache.spark.executor.TaskMetrics
-import org.apache.spark.scheduler.{SparkListenerTaskStart, SparkListenerTaskEnd, SparkListener}
+import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskEnd, SparkListenerTaskStart}
 import org.apache.spark.scheduler.TaskInfo
 import org.apache.spark.ui.JettyUtils._
 import org.apache.spark.ui.Page.Executors
 import org.apache.spark.ui.UIUtils
 import org.apache.spark.util.Utils
 
-
 private[spark] class ExecutorsUI(val sc: SparkContext) {
 
   private var _listener: Option[ExecutorsListener] = None
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala b/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala
index ab03eb5ce1ab42fc86df6956b47c74f4c54ac64d..d012ba4dbb3dbd7db6ed7a131ec9d291d3e99248 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala
@@ -17,11 +17,11 @@
 
 package org.apache.spark.ui.jobs
 
+import scala.collection.mutable
 import scala.xml.Node
 
 import org.apache.spark.scheduler.SchedulingMode
 import org.apache.spark.util.Utils
-import scala.collection.mutable
 
 /** Page showing executor summary */
 private[spark] class ExecutorTable(val parent: JobProgressUI, val stageId: Int) {
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala
index 6289f8744f24090e7c3cce8b8116daca72466eab..81713edcf5db2e188be596f53ea6c3c4693a45ea 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala
@@ -25,7 +25,6 @@ import org.apache.spark.scheduler.SchedulingMode
 import org.apache.spark.ui.Page._
 import org.apache.spark.ui.UIUtils._
 
-
 /** Page showing list of all ongoing and recently finished stages and pools*/
 private[spark] class IndexPage(parent: JobProgressUI) {
   def listener = parent.listener
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
index 858a10ce750fffb8136637edbb512d4ecc7491c6..07a08f5277d1972596930fff2d9dfb140b05acc0 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark.ui.jobs
 
-import scala.Seq
 import scala.collection.mutable.{ListBuffer, HashMap, HashSet}
 
 import org.apache.spark.{ExceptionFailure, SparkContext, Success}
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala
index c1ee2f3d00d666d9dad49e3fa937773667d35d01..557bce6b6635394f5cf101cbafca3a8ce4b273d4 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala
@@ -17,23 +17,15 @@
 
 package org.apache.spark.ui.jobs
 
-import scala.concurrent.duration._
-
 import java.text.SimpleDateFormat
-
 import javax.servlet.http.HttpServletRequest
 
-import org.eclipse.jetty.server.Handler
-
 import scala.Seq
-import scala.collection.mutable.{HashSet, ListBuffer, HashMap, ArrayBuffer}
 
+import org.eclipse.jetty.server.Handler
+
+import org.apache.spark.SparkContext
 import org.apache.spark.ui.JettyUtils._
-import org.apache.spark.{ExceptionFailure, SparkContext, Success}
-import org.apache.spark.scheduler._
-import collection.mutable
-import org.apache.spark.scheduler.SchedulingMode
-import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
 import org.apache.spark.util.Utils
 
 /** Web UI showing progress status of all jobs in the given SparkContext. */
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala
index 89fffcb80d0d745f258c1d9cf9494790fadcb085..eb7518a0208405eaa6e0cb71e2799f2f4463b0be 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala
@@ -19,12 +19,10 @@ package org.apache.spark.ui.jobs
 
 import javax.servlet.http.HttpServletRequest
 
-import scala.xml.{NodeSeq, Node}
-import scala.collection.mutable.HashSet
+import scala.xml.Node
 
-import org.apache.spark.scheduler.Stage
-import org.apache.spark.ui.UIUtils._
 import org.apache.spark.ui.Page._
+import org.apache.spark.ui.UIUtils._
 
 /** Page showing specific pool details */
 private[spark] class PoolPage(parent: JobProgressUI) {
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
index b6e98942ab811bea1f8db5fb80bd1c5aa04584fc..ddc687a45a0958160882c17eb2f15ce29c8d1524 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
@@ -18,17 +18,16 @@
 package org.apache.spark.ui.jobs
 
 import java.util.Date
-
 import javax.servlet.http.HttpServletRequest
 
 import scala.xml.Node
 
-import org.apache.spark.{ExceptionFailure}
+import org.apache.spark.ExceptionFailure
 import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.scheduler.TaskInfo
 import org.apache.spark.ui.UIUtils._
 import org.apache.spark.ui.Page._
 import org.apache.spark.util.{Utils, Distribution}
-import org.apache.spark.scheduler.TaskInfo
 
 /** Page showing statistics and task list for a given stage */
 private[spark] class StagePage(parent: JobProgressUI) {
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala b/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala
index 999a94fc2d008aba97443d41c48af35163bcc167..c5fd3ae16dc9fe549da0dd0498a1d6db6c04314f 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala
@@ -19,14 +19,13 @@ package org.apache.spark.ui.jobs
 
 import java.util.Date
 
-import scala.xml.Node
 import scala.collection.mutable.HashSet
+import scala.xml.Node
 
 import org.apache.spark.scheduler.{SchedulingMode, StageInfo, TaskInfo}
 import org.apache.spark.ui.UIUtils
 import org.apache.spark.util.Utils
 
-
 /** Page showing list of all ongoing and recently finished stages */
 private[spark] class StageTable(val stages: Seq[StageInfo], val parent: JobProgressUI) {
 
diff --git a/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala b/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala
index 39f422dd6b90f70684ed91b2dc95c6b09bccc336..dc18eab74e0da6ff0ecd751f1e6708bbc840db99 100644
--- a/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.ui.storage
 
-import scala.concurrent.duration._
-
 import javax.servlet.http.HttpServletRequest
 
 import org.eclipse.jetty.server.Handler
diff --git a/core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala b/core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala
index 109a7d4094c0a4bee6390d16d81871b477b1b399..6a3c41fb1155da78036dcafda2e4423cc2c70d3f 100644
--- a/core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala
@@ -22,8 +22,8 @@ import javax.servlet.http.HttpServletRequest
 import scala.xml.Node
 
 import org.apache.spark.storage.{RDDInfo, StorageUtils}
-import org.apache.spark.ui.UIUtils._
 import org.apache.spark.ui.Page._
+import org.apache.spark.ui.UIUtils._
 import org.apache.spark.util.Utils
 
 /** Page showing list of RDD's currently stored in the cluster */
diff --git a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
index b83cd54f3c39a844d1e209075b9fa8fe24ba083b..78b149b14b1d4bf0f7f8cdd4a1879ae634f18b27 100644
--- a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
@@ -23,11 +23,10 @@ import scala.xml.Node
 
 import org.apache.spark.storage.{BlockId, StorageStatus, StorageUtils}
 import org.apache.spark.storage.BlockManagerMasterActor.BlockStatus
-import org.apache.spark.ui.UIUtils._
 import org.apache.spark.ui.Page._
+import org.apache.spark.ui.UIUtils._
 import org.apache.spark.util.Utils
 
-
 /** Page showing storage details for a given RDD */
 private[spark] class RDDPage(parent: BlockManagerUI) {
   val sc = parent.sc
diff --git a/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala b/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala
index 761d378c7fd8b5f264151f54a2a78aac435e6368..f26ed47e5804643160042d6774eaf28c7b5a92ce 100644
--- a/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala
+++ b/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala
@@ -22,8 +22,8 @@ import scala.concurrent.duration.{Duration, FiniteDuration}
 
 import akka.actor.{ActorSystem, ExtendedActorSystem, IndestructibleActorSystem}
 import com.typesafe.config.ConfigFactory
-
 import org.apache.log4j.{Level, Logger}
+
 import org.apache.spark.SparkConf
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala b/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala
index a38329df037a14ae6ec0e4bba7278990a4243480..c3692f2fd929ba6ad6d762e96021ae2a9e6668ab 100644
--- a/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala
+++ b/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala
@@ -19,8 +19,9 @@ package org.apache.spark.util
 
 import java.io.Serializable
 import java.util.{PriorityQueue => JPriorityQueue}
-import scala.collection.generic.Growable
+
 import scala.collection.JavaConverters._
+import scala.collection.generic.Growable
 
 /**
  * Bounded priority queue. This class wraps the original PriorityQueue
diff --git a/core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala b/core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala
index e214d2a519a20d94281af8e66f6c4d7716e7bbdf..54de4d4ee8ca750bc1ea0292b803e4a81e5f54b2 100644
--- a/core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala
+++ b/core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala
@@ -19,6 +19,7 @@ package org.apache.spark.util
 
 import java.io.InputStream
 import java.nio.ByteBuffer
+
 import org.apache.spark.storage.BlockManager
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala b/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
index c0c057be8defc3fe93dab818bb6fae044d61e75e..681d0a30cb3f885fe7938f779113aebbd2e2eed7 100644
--- a/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
+++ b/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
@@ -17,14 +17,14 @@
 
 package org.apache.spark.util
 
-import java.lang.reflect.Field
+import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
 
 import scala.collection.mutable.Map
 import scala.collection.mutable.Set
 
 import org.objectweb.asm.{ClassReader, ClassVisitor, MethodVisitor, Type}
 import org.objectweb.asm.Opcodes._
-import java.io.{ByteArrayOutputStream, ByteArrayInputStream}
+
 import org.apache.spark.Logging
 
 private[spark] object ClosureCleaner extends Logging {
diff --git a/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala b/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala
index 3868ab36312a78fc460c031cac52a0b3c2d55d7e..0448919e091614926fc229b650e5b7c78fcdba08 100644
--- a/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala
+++ b/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala
@@ -17,9 +17,9 @@
 
 package org.apache.spark.util
 
-import java.util.{TimerTask, Timer}
-import org.apache.spark.{SparkConf, Logging}
+import java.util.{Timer, TimerTask}
 
+import org.apache.spark.{Logging, SparkConf}
 
 /**
  * Runs a timer task to periodically clean up metadata (e.g. old files or hashtable entries)
diff --git a/core/src/main/scala/org/apache/spark/util/MutablePair.scala b/core/src/main/scala/org/apache/spark/util/MutablePair.scala
index 34f1f6606fc3fbbdfca834c7843a6d9f2fe4f7c8..b053266f12748f800223096ba41fbd0ecd8ecb03 100644
--- a/core/src/main/scala/org/apache/spark/util/MutablePair.scala
+++ b/core/src/main/scala/org/apache/spark/util/MutablePair.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark.util
 
-
 /**
  * A tuple of 2 elements. This can be used as an alternative to Scala's Tuple2 when we want to
  * minimize object allocation.
diff --git a/core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala b/core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala
index f2b1ad7d0e91d667cd01b7afc16e91eb78d3fcea..2b452ad33b021c5818ba041152572ae55a1f3ca9 100644
--- a/core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala
+++ b/core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark.util
 
+import java.io.{EOFException, IOException, ObjectInputStream, ObjectOutputStream}
 import java.nio.ByteBuffer
-import java.io.{IOException, ObjectOutputStream, EOFException, ObjectInputStream}
 import java.nio.channels.Channels
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/util/SerializableHyperLogLog.scala b/core/src/main/scala/org/apache/spark/util/SerializableHyperLogLog.scala
index 2110b3596ee2ac42c7613ba45d1123916121d2d9..21a88eea3bbc2694fbd9f8cd6f2016eb5bc1f54a 100644
--- a/core/src/main/scala/org/apache/spark/util/SerializableHyperLogLog.scala
+++ b/core/src/main/scala/org/apache/spark/util/SerializableHyperLogLog.scala
@@ -17,8 +17,9 @@
 
 package org.apache.spark.util
 
-import java.io.{Externalizable, ObjectOutput, ObjectInput}
-import com.clearspring.analytics.stream.cardinality.{ICardinality, HyperLogLog}
+import java.io.{Externalizable, ObjectInput, ObjectOutput}
+
+import com.clearspring.analytics.stream.cardinality.{HyperLogLog, ICardinality}
 
 /**
  * A wrapper around [[com.clearspring.analytics.stream.cardinality.HyperLogLog]] that is
diff --git a/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala b/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
index 17c6481c18463fc60e802bb4a9afcf883060e52e..b955612ca774909d055d2af97a23d6c05e47e6c6 100644
--- a/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
+++ b/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
@@ -17,20 +17,19 @@
 
 package org.apache.spark.util
 
+import java.lang.management.ManagementFactory
+import java.lang.reflect.{Array => JArray}
 import java.lang.reflect.Field
 import java.lang.reflect.Modifier
-import java.lang.reflect.{Array => JArray}
 import java.util.IdentityHashMap
-import java.util.concurrent.ConcurrentHashMap
 import java.util.Random
-
-import javax.management.MBeanServer
-import java.lang.management.ManagementFactory
+import java.util.concurrent.ConcurrentHashMap
 
 import scala.collection.mutable.ArrayBuffer
 
 import it.unimi.dsi.fastutil.ints.IntOpenHashSet
-import org.apache.spark.{SparkEnv, SparkConf, SparkContext, Logging}
+
+import org.apache.spark.Logging
 
 /**
  * Estimates the sizes of Java objects (number of bytes of memory they occupy), for use in
diff --git a/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala b/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala
index 8e07a0f29addf7c1246c1850737830fc42dde5a3..ddbd084ed7f013577b4de7f1c96bcfe2b5b3352a 100644
--- a/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala
+++ b/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala
@@ -18,10 +18,11 @@
 package org.apache.spark.util
 
 import java.util.concurrent.ConcurrentHashMap
+
 import scala.collection.JavaConversions
-import scala.collection.mutable.Map
 import scala.collection.immutable
-import org.apache.spark.scheduler.MapStatus
+import scala.collection.mutable.Map
+
 import org.apache.spark.Logging
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala b/core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala
index 26983138ff0da984c3e97b4149ec4ebd16b712e8..19bece86b36b4df0c63452ed1677f370035c2891 100644
--- a/core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala
+++ b/core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala
@@ -17,10 +17,10 @@
 
 package org.apache.spark.util
 
-import scala.collection.mutable.Set
-import scala.collection.JavaConversions
 import java.util.concurrent.ConcurrentHashMap
 
+import scala.collection.JavaConversions
+import scala.collection.mutable.Set
 
 class TimeStampedHashSet[A] extends Set[A] {
   val internalMap = new ConcurrentHashMap[A, Long]()
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 8749ab7875bb793f71b922001352a7ed6bb89519..8e69f1d3351b50a502cf383c5a0d4aabeacadb60 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -18,7 +18,8 @@
 package org.apache.spark.util
 
 import java.io._
-import java.net.{InetAddress, URL, URI, NetworkInterface, Inet4Address}
+import java.net.{InetAddress, Inet4Address, NetworkInterface, URI, URL}
+import java.nio.ByteBuffer
 import java.util.{Locale, Random, UUID}
 import java.util.concurrent.{ConcurrentHashMap, Executors, ThreadPoolExecutor}
 
@@ -30,16 +31,11 @@ import scala.reflect.ClassTag
 
 import com.google.common.io.Files
 import com.google.common.util.concurrent.ThreadFactoryBuilder
+import org.apache.hadoop.fs.{FileSystem, FileUtil, Path}
 
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.{Path, FileSystem, FileUtil}
-import org.apache.hadoop.io._
-
+import org.apache.spark.{Logging, SparkConf, SparkException}
 import org.apache.spark.serializer.{DeserializationStream, SerializationStream, SerializerInstance}
 import org.apache.spark.deploy.SparkHadoopUtil
-import java.nio.ByteBuffer
-import org.apache.spark.{SparkConf, SparkException, Logging}
-
 
 /**
  * Various utility methods used by Spark.
diff --git a/core/src/main/scala/org/apache/spark/util/Vector.scala b/core/src/main/scala/org/apache/spark/util/Vector.scala
index 96da93d8545770c5c7d1f85a9a804f3ee281ecf7..d437c055f33d436354f0439cfa5178eb6fab1d02 100644
--- a/core/src/main/scala/org/apache/spark/util/Vector.scala
+++ b/core/src/main/scala/org/apache/spark/util/Vector.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.util
 
 import scala.util.Random
+
 import org.apache.spark.util.random.XORShiftRandom
 
 class Vector(val elements: Array[Double]) extends Serializable {
diff --git a/core/src/main/scala/org/apache/spark/util/collection/BitSet.scala b/core/src/main/scala/org/apache/spark/util/collection/BitSet.scala
index c9cf51284387529ae2f820f5ed59a077cbbc88d2..d3153d2cac4a50a41d6cdedfc3ebc8d3cc2bc6fc 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/BitSet.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/BitSet.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark.util.collection
 
-
 /**
  * A simple, fixed-size bit set implementation. This implementation is fast because it avoids
  * safety/bound checking.
diff --git a/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
index 59ba1e457ce115319bec8a99786022c1b4a3f5c0..856d092ab3463410a25c3e842490a5939f63d62a 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
@@ -23,8 +23,8 @@ import java.util.Comparator
 import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
 
-import it.unimi.dsi.fastutil.io.FastBufferedInputStream
 import com.google.common.io.ByteStreams
+import it.unimi.dsi.fastutil.io.FastBufferedInputStream
 
 import org.apache.spark.{Logging, SparkEnv}
 import org.apache.spark.serializer.Serializer
diff --git a/core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala b/core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala
index 6b66d54751987fe8920f0677c872c3a677bf735d..0f1fca4813ba995ae60e91147992355a8ebdc097 100644
--- a/core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala
+++ b/core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.util.random
 
 import java.util.Random
+
 import cern.jet.random.Poisson
 import cern.jet.random.engine.DRand
 
diff --git a/core/src/main/scala/org/apache/spark/util/random/XORShiftRandom.scala b/core/src/main/scala/org/apache/spark/util/random/XORShiftRandom.scala
index 20d32d01b5e1918ed9473db4daaf551b08f8c59a..ca611b67ed91d2bb22f80521ea84465ed799d83c 100644
--- a/core/src/main/scala/org/apache/spark/util/random/XORShiftRandom.scala
+++ b/core/src/main/scala/org/apache/spark/util/random/XORShiftRandom.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.util.random
 
 import java.util.{Random => JavaRandom}
+
 import org.apache.spark.util.Utils.timeIt
 
 /**
diff --git a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
index c443c5266e42b6b5baa3b02aa448d4bac1457bc7..6c73ea6949dd24413acdc77cf116b5c1e3e5eabf 100644
--- a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
@@ -17,12 +17,11 @@
 
 package org.apache.spark
 
+import scala.collection.mutable
+
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
-import collection.mutable
-import java.util.Random
-import scala.math.exp
-import scala.math.signum
+
 import org.apache.spark.SparkContext._
 
 class AccumulatorSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
diff --git a/core/src/test/scala/org/apache/spark/CheckpointSuite.scala b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
index ec13b329b25a8534078e5a55da8fe25801f3e0ca..d2e29f20f0b08b3ac4043f0d7563ce45f099a207 100644
--- a/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
+++ b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
@@ -17,11 +17,14 @@
 
 package org.apache.spark
 
+import java.io.File
+
 import scala.reflect.ClassTag
+
 import org.scalatest.FunSuite
-import java.io.File
-import org.apache.spark.rdd._
+
 import org.apache.spark.SparkContext._
+import org.apache.spark.rdd._
 import org.apache.spark.storage.{BlockId, StorageLevel, TestBlockId}
 import org.apache.spark.util.Utils
 
diff --git a/core/src/test/scala/org/apache/spark/DistributedSuite.scala b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
index 8de7a328d1cf5eadd5f46ecf470ebd109e660186..14ddd6f1ec08fd0cfd996d6cd43f43e2145366dc 100644
--- a/core/src/test/scala/org/apache/spark/DistributedSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
@@ -17,17 +17,16 @@
 
 package org.apache.spark
 
-import network.ConnectionManagerId
 import org.scalatest.BeforeAndAfter
-import org.scalatest.concurrent.Timeouts._
 import org.scalatest.FunSuite
+import org.scalatest.concurrent.Timeouts._
 import org.scalatest.matchers.ShouldMatchers
-import org.scalatest.time.{Span, Millis}
+import org.scalatest.time.{Millis, Span}
 
-import SparkContext._
+import org.apache.spark.SparkContext._
+import org.apache.spark.network.ConnectionManagerId
 import org.apache.spark.storage.{BlockManagerWorker, GetBlock, RDDBlockId, StorageLevel}
 
-
 class NotSerializableClass
 class NotSerializableExn(val notSer: NotSerializableClass) extends Throwable() {}
 
diff --git a/core/src/test/scala/org/apache/spark/DriverSuite.scala b/core/src/test/scala/org/apache/spark/DriverSuite.scala
index fb89537258542245c0d0584f3d69c33f9204a875..e0e80112786499fb83c394f1a6a5a14eda878ddd 100644
--- a/core/src/test/scala/org/apache/spark/DriverSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DriverSuite.scala
@@ -26,6 +26,7 @@ import org.scalatest.FunSuite
 import org.scalatest.concurrent.Timeouts
 import org.scalatest.prop.TableDrivenPropertyChecks._
 import org.scalatest.time.SpanSugar._
+
 import org.apache.spark.util.Utils
 
 class DriverSuite extends FunSuite with Timeouts {
diff --git a/core/src/test/scala/org/apache/spark/FailureSuite.scala b/core/src/test/scala/org/apache/spark/FailureSuite.scala
index befdc1589f009de6c16e53280f9725bd25d1ccf2..ac3c86778d526034fc25465fe615d1e17e7cae32 100644
--- a/core/src/test/scala/org/apache/spark/FailureSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FailureSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark
 
 import org.scalatest.FunSuite
 
-import SparkContext._
+import org.apache.spark.SparkContext._
 import org.apache.spark.util.NonSerializable
 
 // Common state shared by FailureSuite-launched tasks. We use a global object
diff --git a/core/src/test/scala/org/apache/spark/FileServerSuite.scala b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
index a2eb9a4e84696c9326c5523e673add86b9d620d8..9be67b3c95abdf853b9a95f7bb2432d5117946d9 100644
--- a/core/src/test/scala/org/apache/spark/FileServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
@@ -20,10 +20,11 @@ package org.apache.spark
 import java.io._
 import java.util.jar.{JarEntry, JarOutputStream}
 
-import SparkContext._
 import com.google.common.io.Files
 import org.scalatest.FunSuite
 
+import org.apache.spark.SparkContext._
+
 class FileServerSuite extends FunSuite with LocalSparkContext {
 
   @transient var tmpFile: File = _
diff --git a/core/src/test/scala/org/apache/spark/FileSuite.scala b/core/src/test/scala/org/apache/spark/FileSuite.scala
index 7b82a4cdd9cfd2b5f5b06239d5fc96d15bcade08..8ff02aef67aa008f27e8c33bb21ddd07ceb5704f 100644
--- a/core/src/test/scala/org/apache/spark/FileSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileSuite.scala
@@ -17,17 +17,16 @@
 
 package org.apache.spark
 
-import java.io.{FileWriter, PrintWriter, File}
+import java.io.{File, FileWriter}
 
 import scala.io.Source
 
 import com.google.common.io.Files
-import org.scalatest.FunSuite
 import org.apache.hadoop.io._
-import org.apache.hadoop.io.compress.{DefaultCodec, CompressionCodec, GzipCodec}
-
+import org.apache.hadoop.io.compress.DefaultCodec
+import org.scalatest.FunSuite
 
-import SparkContext._
+import org.apache.spark.SparkContext._
 
 class FileSuite extends FunSuite with LocalSparkContext {
 
diff --git a/core/src/test/scala/org/apache/spark/JavaAPISuite.java b/core/src/test/scala/org/apache/spark/JavaAPISuite.java
index 8c573ac0d65e0847c4218880d8bdeaf6dbf7b7fc..20232e9fbb8d0c58e8de8e510eb2670c87ac04c9 100644
--- a/core/src/test/scala/org/apache/spark/JavaAPISuite.java
+++ b/core/src/test/scala/org/apache/spark/JavaAPISuite.java
@@ -22,14 +22,14 @@ import java.io.IOException;
 import java.io.Serializable;
 import java.util.*;
 
-import com.google.common.base.Optional;
 import scala.Tuple2;
 
+import com.google.common.base.Optional;
 import com.google.common.base.Charsets;
-import org.apache.hadoop.io.compress.DefaultCodec;
 import com.google.common.io.Files;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapreduce.Job;
@@ -48,7 +48,6 @@ import org.apache.spark.partial.PartialResult;
 import org.apache.spark.storage.StorageLevel;
 import org.apache.spark.util.StatCounter;
 
-
 // The test suite itself is Serializable so that anonymous Function implementations can be
 // serialized, as an alternative to converting these anonymous classes to static inner classes;
 // see http://stackoverflow.com/questions/758570/.
diff --git a/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala b/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
index 1121e06e2e6cc5b630f7ad5fc9020b8bfd6853a2..20c503d30c80101e63c218761c95e20044c3edeb 100644
--- a/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
@@ -20,9 +20,9 @@ package org.apache.spark
 import java.util.concurrent.Semaphore
 
 import scala.concurrent.Await
+import scala.concurrent.ExecutionContext.Implicits.global
 import scala.concurrent.duration.Duration
 import scala.concurrent.future
-import scala.concurrent.ExecutionContext.Implicits.global
 
 import org.scalatest.{BeforeAndAfter, FunSuite}
 import org.scalatest.matchers.ShouldMatchers
@@ -30,7 +30,6 @@ import org.scalatest.matchers.ShouldMatchers
 import org.apache.spark.SparkContext._
 import org.apache.spark.scheduler.{SparkListenerTaskStart, SparkListener}
 
-
 /**
  * Test suite for cancelling running jobs. We run the cancellation tasks for single job action
  * (e.g. count) as well as multi-job action (e.g. take). We test the local and cluster schedulers
diff --git a/core/src/test/scala/org/apache/spark/LocalSparkContext.scala b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
index 3ac706110e287dfb6bd15978af9c53b6013b7ac2..4b972f88a95424ae9b23f3d87a39aafe08784356 100644
--- a/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
+++ b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
@@ -17,12 +17,11 @@
 
 package org.apache.spark
 
-import org.scalatest.Suite
-import org.scalatest.BeforeAndAfterEach
-import org.scalatest.BeforeAndAfterAll
-
 import org.jboss.netty.logging.InternalLoggerFactory
 import org.jboss.netty.logging.Slf4JLoggerFactory
+import org.scalatest.BeforeAndAfterAll
+import org.scalatest.BeforeAndAfterEach
+import org.scalatest.Suite
 
 /** Manages a local `sc` {@link SparkContext} variable, correctly stopping it after each test. */
 trait LocalSparkContext extends BeforeAndAfterEach with BeforeAndAfterAll { self: Suite =>
diff --git a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
index 930c2523caf8c0adc2ced32532b5e54bff090fbf..6c1e325f6f348535b9480062e37165bf5ff826cb 100644
--- a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
@@ -17,13 +17,14 @@
 
 package org.apache.spark
 
-import org.scalatest.FunSuite
+import scala.concurrent.Await
 
 import akka.actor._
+import org.scalatest.FunSuite
+
 import org.apache.spark.scheduler.MapStatus
 import org.apache.spark.storage.BlockManagerId
 import org.apache.spark.util.AkkaUtils
-import scala.concurrent.Await
 
 class MapOutputTrackerSuite extends FunSuite with LocalSparkContext {
   private val conf = new SparkConf
diff --git a/core/src/test/scala/org/apache/spark/PartitioningSuite.scala b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
index 1c5d5ea4364f52ce8d6baf5a966ff505065363f2..4305686d3a6d5f564d55c0a452ff4d4a5c7c286d 100644
--- a/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
@@ -18,13 +18,12 @@
 package org.apache.spark
 
 import scala.math.abs
-import scala.collection.mutable.ArrayBuffer
 
 import org.scalatest.{FunSuite, PrivateMethodTester}
 
 import org.apache.spark.SparkContext._
-import org.apache.spark.util.StatCounter
 import org.apache.spark.rdd.RDD
+import org.apache.spark.util.StatCounter
 
 class PartitioningSuite extends FunSuite with SharedSparkContext with PrivateMethodTester {
 
diff --git a/core/src/test/scala/org/apache/spark/PipedRDDSuite.scala b/core/src/test/scala/org/apache/spark/PipedRDDSuite.scala
index 2e851d892dd0d5f4ec3628a2d945c172d3b16d0b..3a0385a1b0bd988ebff1650112a900829b7cde8a 100644
--- a/core/src/test/scala/org/apache/spark/PipedRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PipedRDDSuite.scala
@@ -18,7 +18,6 @@
 package org.apache.spark
 
 import org.scalatest.FunSuite
-import SparkContext._
 
 class PipedRDDSuite extends FunSuite with SharedSparkContext {
 
diff --git a/core/src/test/scala/org/apache/spark/SharedSparkContext.scala b/core/src/test/scala/org/apache/spark/SharedSparkContext.scala
index c650ef4ed58cb3b015accb3951c90f7bc88d5c41..0b6511a80df1dd6eaf934709864d9dd0ca4827a4 100644
--- a/core/src/test/scala/org/apache/spark/SharedSparkContext.scala
+++ b/core/src/test/scala/org/apache/spark/SharedSparkContext.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark
 
-import org.scalatest.Suite
 import org.scalatest.BeforeAndAfterAll
+import org.scalatest.Suite
 
 /** Shares a local `SparkContext` between all tests in a suite and closes it at the end */
 trait SharedSparkContext extends BeforeAndAfterAll { self: Suite =>
diff --git a/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
index e121b162ad9e6243c437a32473603e496e9efd9e..29d428aa7dc41aeb4b3a5085ebb84e2be35d6370 100644
--- a/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
@@ -19,7 +19,6 @@ package org.apache.spark
 
 import org.scalatest.BeforeAndAfterAll
 
-
 class ShuffleNettySuite extends ShuffleSuite with BeforeAndAfterAll {
 
   // This test suite should run all tests in ShuffleSuite with Netty shuffle mode.
diff --git a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
index db717865db5d00403463da020dbc78d0188dd5fd..abea36f7c83df396bef6990a9c0d357df2a644a3 100644
--- a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
@@ -22,10 +22,9 @@ import org.scalatest.matchers.ShouldMatchers
 
 import org.apache.spark.SparkContext._
 import org.apache.spark.ShuffleSuite.NonJavaSerializableClass
-import org.apache.spark.rdd.{RDD, SubtractedRDD, CoGroupedRDD, OrderedRDDFunctions, ShuffledRDD}
-import org.apache.spark.util.MutablePair
+import org.apache.spark.rdd.{CoGroupedRDD, OrderedRDDFunctions, RDD, ShuffledRDD, SubtractedRDD}
 import org.apache.spark.serializer.KryoSerializer
-
+import org.apache.spark.util.MutablePair
 
 class ShuffleSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
   test("groupByKey without compression") {
diff --git a/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
index 939fe518012d0c17c9b319c2d94c08f9ffa7e7f6..5cb49d9a7fce1b5560d208a107e9751fa0426932 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
@@ -18,7 +18,6 @@
 package org.apache.spark
 
 import org.scalatest.FunSuite
-import org.apache.spark.SparkContext._
 
 class SparkContextInfoSuite extends FunSuite with LocalSparkContext {
   test("getPersistentRDDs only returns RDDs that are marked as cached") {
diff --git a/core/src/test/scala/org/apache/spark/ThreadingSuite.scala b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
index 75d6493e338fe71683316538a32ca679357a447d..b5383d553add10b096aa82d2fca4f357c7d45cf4 100644
--- a/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
@@ -22,9 +22,6 @@ import java.util.concurrent.atomic.AtomicBoolean
 import java.util.concurrent.atomic.AtomicInteger
 
 import org.scalatest.FunSuite
-import org.scalatest.BeforeAndAfter
-
-import SparkContext._
 
 /**
  * Holds state shared across task threads in some ThreadingSuite tests.
diff --git a/core/src/test/scala/org/apache/spark/UnpersistSuite.scala b/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
index 768ca3850e7e79d32b324ddcd7c72abfad00d9ed..42ff059e018a3c3302e5f274c587e038a67fdf4f 100644
--- a/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
+++ b/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
@@ -19,8 +19,7 @@ package org.apache.spark
 
 import org.scalatest.FunSuite
 import org.scalatest.concurrent.Timeouts._
-import org.scalatest.time.{Span, Millis}
-import org.apache.spark.SparkContext._
+import org.scalatest.time.{Millis, Span}
 
 class UnpersistSuite extends FunSuite with LocalSparkContext {
   test("unpersist RDD") {
diff --git a/core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala b/core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala
index 618b9c113b849d765ee1fad04ec4ac415b6c8a96..4f87fd8654c4ae680023a8b3b1718fb9ee5a3d42 100644
--- a/core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala
@@ -17,16 +17,7 @@
 
 package org.apache.spark
 
-import scala.collection.immutable.NumericRange
-
 import org.scalatest.FunSuite
-import org.scalatest.prop.Checkers
-import org.scalacheck.Arbitrary._
-import org.scalacheck.Gen
-import org.scalacheck.Prop._
-
-import SparkContext._
-
 
 object ZippedPartitionsSuite {
   def procZippedData(i: Iterator[Int], s: Iterator[String], d: Iterator[Double]) : Iterator[Int] = {
diff --git a/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala b/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala
index 5bcebabc9a4199fb31a77444790c8a383be19978..7b866f08a0e9f81f6475950cbeb8b5157990facc 100644
--- a/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala
@@ -17,11 +17,10 @@
 
 package org.apache.spark.api.python
 
-import org.scalatest.FunSuite
-import org.scalatest.matchers.ShouldMatchers
-
 import java.io.{ByteArrayOutputStream, DataOutputStream}
 
+import org.scalatest.FunSuite
+
 class PythonRDDSuite extends FunSuite {
 
     test("Writing large strings to the worker") {
diff --git a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
index 6445db0063fc49ea8dc9fe92fc9b54f057ac034c..de866ed7ffed8d6e1b866f6f5b2961931b9921d5 100644
--- a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
@@ -27,7 +27,7 @@ import org.scalatest.FunSuite
 
 import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
 import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, RecoveryState, WorkerInfo}
-import org.apache.spark.deploy.worker.{ExecutorRunner, DriverRunner}
+import org.apache.spark.deploy.worker.{DriverRunner, ExecutorRunner}
 
 class JsonProtocolSuite extends FunSuite {
 
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala b/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
index 0c502612647a2d589544374d47c6806a2cb3fd14..a2c131b0c978789155dce50fd6eb87065865cde3 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
@@ -19,15 +19,13 @@ package org.apache.spark.deploy.worker
 
 import java.io.File
 
-import scala.collection.JavaConversions._
-
 import org.mockito.Mockito._
 import org.mockito.Matchers._
+import org.mockito.invocation.InvocationOnMock
+import org.mockito.stubbing.Answer
 import org.scalatest.FunSuite
 
 import org.apache.spark.deploy.{Command, DriverDescription}
-import org.mockito.stubbing.Answer
-import org.mockito.invocation.InvocationOnMock
 
 class DriverRunnerTest extends FunSuite {
   private def createDriverRunner() = {
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala b/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
index 4baa65659f041e046e8c74d84f6210a59103e9d0..3cab8e7b379341def636162693721e55d36251b6 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
@@ -21,7 +21,7 @@ import java.io.File
 
 import org.scalatest.FunSuite
 
-import org.apache.spark.deploy.{ExecutorState, Command, ApplicationDescription}
+import org.apache.spark.deploy.{ApplicationDescription, Command, ExecutorState}
 
 class ExecutorRunnerTest extends FunSuite {
   test("command includes appId") {
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala
index 1f1d8d138005bb736184c1ba8aaf02830e596436..0b5ed6d77034b32a9b28b3be9cf778d3c0fd298d 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala
@@ -17,11 +17,10 @@
 
 package org.apache.spark.deploy.worker
 
-
+import akka.actor.{ActorSystem, AddressFromURIString, Props}
 import akka.testkit.TestActorRef
-import org.scalatest.FunSuite
 import akka.remote.DisassociatedEvent
-import akka.actor.{ActorSystem, AddressFromURIString, Props}
+import org.scalatest.FunSuite
 
 class WorkerWatcherSuite extends FunSuite {
   test("WorkerWatcher shuts down on valid disassociation") {
diff --git a/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala b/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
index 8d7546085f489767b313c7f96f83e74b3f0b9ffe..68a0ea36aa545730fa89f6035022c1872a3b69be 100644
--- a/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
+++ b/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
@@ -20,8 +20,8 @@ package org.apache.spark.io
 import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
 
 import org.scalatest.FunSuite
-import org.apache.spark.SparkConf
 
+import org.apache.spark.SparkConf
 
 class CompressionCodecSuite extends FunSuite {
   val conf = new SparkConf(false)
diff --git a/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
index 71a2c6c498eef7244b8fab582dc533ec14bd9aa1..c1e8b295dfe3b8955df84172541905a5e6a22e5d 100644
--- a/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
@@ -18,8 +18,9 @@
 package org.apache.spark.metrics
 
 import org.scalatest.{BeforeAndAfter, FunSuite}
-import org.apache.spark.deploy.master.MasterSource
+
 import org.apache.spark.SparkConf
+import org.apache.spark.deploy.master.MasterSource
 
 class MetricsSystemSuite extends FunSuite with BeforeAndAfter {
   var filePath: String = _
diff --git a/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala b/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
index 0d4c10db8ef33368cb662f4c896f045a80e41b4b..3b833f2e41867112f82596f89cc4aad70e6f2307 100644
--- a/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
@@ -30,7 +30,6 @@ import org.scalatest.time.SpanSugar._
 import org.apache.spark.SparkContext._
 import org.apache.spark.{SparkContext, SparkException, LocalSparkContext}
 
-
 class AsyncRDDActionsSuite extends FunSuite with BeforeAndAfterAll with Timeouts {
 
   @transient private var sc: SparkContext = _
diff --git a/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala
index 7f50a5a47c2ff3ef5f909469dca837bc399171f8..a822bd18bfdbd5737ba21e6d7151d986ff3a9089 100644
--- a/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala
@@ -17,14 +17,10 @@
 
 package org.apache.spark.rdd
 
-import scala.math.abs
-import scala.collection.mutable.ArrayBuffer
-
 import org.scalatest.FunSuite
 
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd._
 import org.apache.spark._
+import org.apache.spark.SparkContext._
 
 class DoubleRDDSuite extends FunSuite with SharedSparkContext {
   // Verify tests on the histogram functionality. We test with both evenly
diff --git a/core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala
index 3d39a31252e5e7a4cd5c09657a692d7b58dcc01c..7c7f69b261a0a579cc750504c6645c4c6ab78865 100644
--- a/core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala
@@ -17,11 +17,12 @@
 
 package org.apache.spark
 
-import org.scalatest.{ BeforeAndAfter, FunSuite }
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.JdbcRDD
 import java.sql._
 
+import org.scalatest.{BeforeAndAfter, FunSuite}
+
+import org.apache.spark.rdd.JdbcRDD
+
 class JdbcRDDSuite extends FunSuite with BeforeAndAfter with LocalSparkContext {
 
   before {
diff --git a/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
index 5da538a1ddfd5ee05063d6a3e555db1f5305fcb7..fa5c9b10fe059272f3fe7ac4d6762751c13f25e5 100644
--- a/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
@@ -22,12 +22,11 @@ import scala.collection.mutable.HashSet
 import scala.util.Random
 
 import org.scalatest.FunSuite
-
 import com.google.common.io.Files
+
 import org.apache.spark.SparkContext._
 import org.apache.spark.{Partitioner, SharedSparkContext}
 
-
 class PairRDDFunctionsSuite extends FunSuite with SharedSparkContext {
   test("groupByKey") {
     val pairs = sc.parallelize(Array((1, 1), (1, 2), (1, 3), (2, 1)))
diff --git a/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala b/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala
index a80afdee7e76943b2e6a2591aa8d38fe86543337..a4381a8b974df40ff71ddf814a828c09f07a2178 100644
--- a/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala
@@ -19,11 +19,11 @@ package org.apache.spark.rdd
 
 import scala.collection.immutable.NumericRange
 
-import org.scalatest.FunSuite
-import org.scalatest.prop.Checkers
 import org.scalacheck.Arbitrary._
 import org.scalacheck.Gen
 import org.scalacheck.Prop._
+import org.scalatest.FunSuite
+import org.scalatest.prop.Checkers
 
 class ParallelCollectionSplitSuite extends FunSuite with Checkers {
   test("one element per slice") {
diff --git a/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala
index 53a7b7c44df1c8345d0656c79f734cce6a1b47cd..956c2b9cbd321de72a110a678c6e428144f85e63 100644
--- a/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala
@@ -18,8 +18,8 @@
 package org.apache.spark.rdd
 
 import org.scalatest.FunSuite
-import org.apache.spark.{TaskContext, Partition, SharedSparkContext}
 
+import org.apache.spark.{Partition, SharedSparkContext, TaskContext}
 
 class PartitionPruningRDDSuite extends FunSuite with SharedSparkContext {
 
diff --git a/core/src/test/scala/org/apache/spark/rdd/PartitionwiseSampledRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PartitionwiseSampledRDDSuite.scala
index cfe96fb3f7b953d05fa4950506c73b4468d9fe80..00c273df63b29f45f827bd6cc225e42793e7572b 100644
--- a/core/src/test/scala/org/apache/spark/rdd/PartitionwiseSampledRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/PartitionwiseSampledRDDSuite.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.rdd
 
 import org.scalatest.FunSuite
+
 import org.apache.spark.SharedSparkContext
 import org.apache.spark.util.random.RandomSampler
 
diff --git a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
index 308c7cc8c3b1488e0bfb8e81b9568b9c1d7acd1d..60bcada55245bb6adc38c11dda69b19a751bead2 100644
--- a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
@@ -18,13 +18,15 @@
 package org.apache.spark.rdd
 
 import scala.collection.mutable.HashMap
+import scala.collection.parallel.mutable
+
 import org.scalatest.FunSuite
 import org.scalatest.concurrent.Timeouts._
-import org.scalatest.time.{Span, Millis}
+import org.scalatest.time.{Millis, Span}
+
+import org.apache.spark._
 import org.apache.spark.SparkContext._
 import org.apache.spark.rdd._
-import scala.collection.parallel.mutable
-import org.apache.spark._
 
 class RDDSuite extends FunSuite with SharedSparkContext {
 
diff --git a/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala b/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala
index e8361199421f117cc11168fa1321fa14ad58ca21..d0619559bb45730588ca80b3abbe07763ee6b3e0 100644
--- a/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala
@@ -18,7 +18,6 @@
 package org.apache.spark.rdd
 
 import org.scalatest.FunSuite
-import org.scalatest.BeforeAndAfter
 import org.scalatest.matchers.ShouldMatchers
 
 import org.apache.spark.{Logging, SharedSparkContext}
diff --git a/core/src/test/scala/org/apache/spark/scheduler/ClusterSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/ClusterSchedulerSuite.scala
index 98ea4cb5612ecabe68c6d332686a8fb1a41460e0..85e929925e3b520439c64f6d8e457a133b382d26 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/ClusterSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/ClusterSchedulerSuite.scala
@@ -17,13 +17,13 @@
 
 package org.apache.spark.scheduler
 
-import org.scalatest.FunSuite
-import org.scalatest.BeforeAndAfter
+import java.util.Properties
 
-import org.apache.spark._
 import scala.collection.mutable.ArrayBuffer
 
-import java.util.Properties
+import org.scalatest.FunSuite
+
+import org.apache.spark._
 
 class FakeTaskSetManager(
     initPriority: Int,
diff --git a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
index f0236ef1e975b4c0c28959d7c50a63a535509087..ad890b4e4d08e2f84190e4818d1936093575d2c5 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -20,11 +20,12 @@ package org.apache.spark.scheduler
 import scala.Tuple2
 import scala.collection.mutable.{HashMap, Map}
 
+import org.scalatest.{BeforeAndAfter, FunSuite}
+
 import org.apache.spark._
 import org.apache.spark.rdd.RDD
 import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
 import org.apache.spark.storage.{BlockId, BlockManagerId, BlockManagerMaster}
-import org.scalatest.{BeforeAndAfter, FunSuite}
 
 /**
  * Tests for DAGScheduler. These tests directly call the event processing functions in DAGScheduler
diff --git a/core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala
index 29102913c719c12a9067782beb8a85a526ec91e2..25fe63c2655a83c2bcc2027189d88cb1ba618918 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala
@@ -17,11 +17,6 @@
 
 package org.apache.spark.scheduler
 
-import java.util.Properties
-import java.util.concurrent.LinkedBlockingQueue
-
-import scala.collection.mutable
-
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
 
@@ -29,7 +24,6 @@ import org.apache.spark._
 import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
 
-
 class JobLoggerSuite extends FunSuite with LocalSparkContext with ShouldMatchers {
   val WAIT_TIMEOUT_MILLIS = 10000
 
diff --git a/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
index e31a116a75bf39450612d0a84ba39987a9cafd6a..8bb5317cd2875c033e74ca20ef4210c360b75d0e 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
@@ -19,11 +19,12 @@ package org.apache.spark.scheduler
 
 import org.scalatest.FunSuite
 import org.scalatest.BeforeAndAfter
+
+import org.apache.spark.LocalSparkContext
+import org.apache.spark.Partition
+import org.apache.spark.SparkContext
 import org.apache.spark.TaskContext
 import org.apache.spark.rdd.RDD
-import org.apache.spark.SparkContext
-import org.apache.spark.Partition
-import org.apache.spark.LocalSparkContext
 
 class TaskContextSuite extends FunSuite with BeforeAndAfter with LocalSparkContext {
 
diff --git a/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala
index 4b52d9651ebe82cbc8ff640e09445d89e0bd4299..ac07f60e284bbb943986336fbf883fa41bd3684f 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala
@@ -21,7 +21,7 @@ import java.nio.ByteBuffer
 
 import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FunSuite}
 
-import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkEnv}
+import org.apache.spark.{LocalSparkContext, SparkContext, SparkEnv}
 import org.apache.spark.storage.TaskResultBlockId
 
 /**
diff --git a/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala
index de321c45b547ca2510060b996bcc67436e91f388..34a7d8cefeea21e8d92ad5105f9bfdde2707ad04 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala
@@ -24,8 +24,7 @@ import org.scalatest.FunSuite
 
 import org.apache.spark._
 import org.apache.spark.executor.TaskMetrics
-import java.nio.ByteBuffer
-import org.apache.spark.util.{Utils, FakeClock}
+import org.apache.spark.util.FakeClock
 
 class FakeDAGScheduler(taskScheduler: FakeClusterScheduler) extends DAGScheduler(taskScheduler) {
   override def taskStarted(task: Task[_], taskInfo: TaskInfo) {
diff --git a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
index 3898583275557ac0089b69b06b3056605d34e0cf..5d4673aebe9e8d6a8d8d855f1ed7276b508904c1 100644
--- a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
@@ -20,9 +20,9 @@ package org.apache.spark.serializer
 import scala.collection.mutable
 
 import com.esotericsoftware.kryo.Kryo
-
 import org.scalatest.FunSuite
-import org.apache.spark.{SparkConf, SharedSparkContext}
+
+import org.apache.spark.SharedSparkContext
 import org.apache.spark.serializer.KryoTest._
 
 class KryoSerializerSuite extends FunSuite with SharedSparkContext {
diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
index 85011c6451777ef6b5e72a0128e0bd7fc38beafe..9f011d9c8d132aa1d87d34cf1c7c5f43d808ae5c 100644
--- a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
@@ -20,18 +20,17 @@ package org.apache.spark.storage
 import java.nio.ByteBuffer
 
 import akka.actor._
-
-import org.scalatest.FunSuite
 import org.scalatest.BeforeAndAfter
+import org.scalatest.FunSuite
 import org.scalatest.PrivateMethodTester
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.concurrent.Timeouts._
 import org.scalatest.matchers.ShouldMatchers._
 import org.scalatest.time.SpanSugar._
 
-import org.apache.spark.util.{SizeEstimator, Utils, AkkaUtils, ByteBufferInputStream}
-import org.apache.spark.serializer.{JavaSerializer, KryoSerializer}
 import org.apache.spark.{SparkConf, SparkContext}
+import org.apache.spark.serializer.{JavaSerializer, KryoSerializer}
+import org.apache.spark.util.{AkkaUtils, ByteBufferInputStream, SizeEstimator, Utils}
 
 class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodTester {
   private val conf = new SparkConf(false)
diff --git a/core/src/test/scala/org/apache/spark/storage/DiskBlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/DiskBlockManagerSuite.scala
index 829f389460f3bdca7480db59fab2e2d0b0455c89..62f9b3cc7b2c188f0e8c381f39614194396f3f64 100644
--- a/core/src/test/scala/org/apache/spark/storage/DiskBlockManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/DiskBlockManagerSuite.scala
@@ -22,9 +22,10 @@ import java.io.{File, FileWriter}
 import scala.collection.mutable
 
 import com.google.common.io.Files
-import org.apache.spark.SparkConf
 import org.scalatest.{BeforeAndAfterEach, FunSuite}
 
+import org.apache.spark.SparkConf
+
 class DiskBlockManagerSuite extends FunSuite with BeforeAndAfterEach {
   private val testConf = new SparkConf(false)
   val rootDir0 = Files.createTempDir()
diff --git a/core/src/test/scala/org/apache/spark/ui/UISuite.scala b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
index c17bbfe7d35ba042e1fcb2d6b3b294c7fa7d44af..20ebb1897e6bae672bd349cbdf0b44016cca13c9 100644
--- a/core/src/test/scala/org/apache/spark/ui/UISuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
@@ -17,10 +17,12 @@
 
 package org.apache.spark.ui
 
-import scala.util.{Failure, Success, Try}
 import java.net.ServerSocket
-import org.scalatest.FunSuite
+
+import scala.util.{Failure, Success, Try}
+
 import org.eclipse.jetty.server.Server
+import org.scalatest.FunSuite
 
 class UISuite extends FunSuite {
   test("jetty port increases under contention") {
diff --git a/core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala b/core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala
index 67a57a0e7f9d0cb8c08805a358c5c691c430511f..8ca863e8b3667e3d9e65fca062ea799b71e06e8a 100644
--- a/core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala
@@ -18,10 +18,10 @@
 package org.apache.spark.ui.jobs
 
 import org.scalatest.FunSuite
-import org.apache.spark.scheduler._
+
 import org.apache.spark.{LocalSparkContext, SparkContext, Success}
-import org.apache.spark.scheduler.SparkListenerTaskStart
 import org.apache.spark.executor.{ShuffleReadMetrics, TaskMetrics}
+import org.apache.spark.scheduler._
 
 class JobProgressListenerSuite extends FunSuite with LocalSparkContext {
   test("test executor id to summary") {
diff --git a/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite.scala b/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite.scala
index de4871d0433ef35d5ba20fd982dc4c5ff7185874..439e5644e20a37bae720cb8592cddf6e535d23ff 100644
--- a/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite.scala
@@ -17,12 +17,10 @@
 
 package org.apache.spark.util
 
-import java.io.NotSerializableException
-
 import org.scalatest.FunSuite
 
-import org.apache.spark.SparkContext
 import org.apache.spark.LocalSparkContext._
+import org.apache.spark.SparkContext
 
 class ClosureCleanerSuite extends FunSuite {
   test("closures inside an object") {
diff --git a/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala b/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
index 45867463a5132a7a8a004372912b4b6896c6a0b9..e1446cbc90bdbb2db98a63a15a425725c29e6ad7 100644
--- a/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
@@ -17,10 +17,12 @@
 
 package org.apache.spark.util
 
+import java.util.NoSuchElementException
+
+import scala.collection.mutable.Buffer
+
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
-import scala.collection.mutable.Buffer
-import java.util.NoSuchElementException
 
 class NextIteratorSuite extends FunSuite with ShouldMatchers {
   test("one iteration") {
diff --git a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
index 11ebdc352bd74ae2f85bf300bdf9316d044406dc..b583a8bd46ae56ab17898fe89c96cd7b04e9957f 100644
--- a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
@@ -17,10 +17,9 @@
 
 package org.apache.spark.util
 
-import org.scalatest.FunSuite
 import org.scalatest.BeforeAndAfterAll
+import org.scalatest.FunSuite
 import org.scalatest.PrivateMethodTester
-import org.apache.spark.SparkContext
 
 class DummyClass1 {}
 
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index 7030ba4858b9d18c266da516cf9ff0f5e6d5b19d..8f55b2372c9f17f920c3b6e2c3c27b00f6735ff0 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -17,13 +17,15 @@
 
 package org.apache.spark.util
 
+import scala.util.Random
+
+import java.io.{ByteArrayOutputStream, ByteArrayInputStream, FileOutputStream}
+import java.nio.{ByteBuffer, ByteOrder}
+
 import com.google.common.base.Charsets
 import com.google.common.io.Files
-import java.io.{ByteArrayOutputStream, ByteArrayInputStream, FileOutputStream, File}
-import java.nio.{ByteBuffer, ByteOrder}
-import org.scalatest.FunSuite
 import org.apache.commons.io.FileUtils
-import scala.util.Random
+import org.scalatest.FunSuite
 
 class UtilsSuite extends FunSuite {
 
diff --git a/core/src/test/scala/org/apache/spark/util/collection/AppendOnlyMapSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/AppendOnlyMapSuite.scala
index f44442f1a5328017b0205e09b19f1b92a1a5412e..52c7288e18b6948a2d2afc71203f2d49f7bdf6be 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/AppendOnlyMapSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/AppendOnlyMapSuite.scala
@@ -17,10 +17,11 @@
 
 package org.apache.spark.util.collection
 
+import java.util.Comparator
+
 import scala.collection.mutable.HashSet
 
 import org.scalatest.FunSuite
-import java.util.Comparator
 
 class AppendOnlyMapSuite extends FunSuite {
   test("initialization") {
diff --git a/core/src/test/scala/org/apache/spark/util/collection/BitSetSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/BitSetSuite.scala
index 0f1ab3d20eea4456385f26df2c724595450e6234..c32183c134f9cc87c0826d9d949557466cf8d071 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/BitSetSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/BitSetSuite.scala
@@ -19,7 +19,6 @@ package org.apache.spark.util.collection
 
 import org.scalatest.FunSuite
 
-
 class BitSetSuite extends FunSuite {
 
   test("basic set and get") {
diff --git a/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala
index e9b62ea70db2243c562ff02d657d1bf7ad11db58..b024c89d94d3304f40e8579c4bea76c000b3aaa2 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala
@@ -18,8 +18,10 @@
 package org.apache.spark.util.collection
 
 import scala.collection.mutable.HashSet
+
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
+
 import org.apache.spark.util.SizeEstimator
 
 class OpenHashMapSuite extends FunSuite with ShouldMatchers {
diff --git a/core/src/test/scala/org/apache/spark/util/collection/OpenHashSetSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/OpenHashSetSuite.scala
index 1b24f8f287cfd3d00d9cc271cb6a186990acbdb2..ff4a98f5dcd4ad3ab76b7458c67afabad8a76e2f 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/OpenHashSetSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/OpenHashSetSuite.scala
@@ -22,7 +22,6 @@ import org.scalatest.matchers.ShouldMatchers
 
 import org.apache.spark.util.SizeEstimator
 
-
 class OpenHashSetSuite extends FunSuite with ShouldMatchers {
 
   test("size for specialized, primitive int") {
diff --git a/core/src/test/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMapSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMapSuite.scala
index 3b60decee9acb71c18ab19186d1b7df6aadaa4ca..e3fca173908e943fd3d20216a832057e474b2a9f 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMapSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMapSuite.scala
@@ -18,8 +18,10 @@
 package org.apache.spark.util.collection
 
 import scala.collection.mutable.HashSet
+
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
+
 import org.apache.spark.util.SizeEstimator
 
 class PrimitiveKeyOpenHashMapSuite extends FunSuite with ShouldMatchers {
diff --git a/core/src/test/scala/org/apache/spark/util/random/RandomSamplerSuite.scala b/core/src/test/scala/org/apache/spark/util/random/RandomSamplerSuite.scala
index 0f4792cd3bdb31fbe6a8d9ba14c26db29a435928..7576c9a51f313da9450f6c8a50f20d1681946ac1 100644
--- a/core/src/test/scala/org/apache/spark/util/random/RandomSamplerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/random/RandomSamplerSuite.scala
@@ -17,11 +17,11 @@
 
 package org.apache.spark.util.random
 
-import org.scalatest.{BeforeAndAfter, FunSuite}
-import org.scalatest.mock.EasyMockSugar
-
 import java.util.Random
+
 import cern.jet.random.Poisson
+import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.mock.EasyMockSugar
 
 class RandomSamplerSuite extends FunSuite with BeforeAndAfter with EasyMockSugar {
 
diff --git a/core/src/test/scala/org/apache/spark/util/random/XORShiftRandomSuite.scala b/core/src/test/scala/org/apache/spark/util/random/XORShiftRandomSuite.scala
index 352aa94219c2f116455effa21d571d720d02b1e3..c51d12bfe0bc6f6d7eb96b31710178e832cd5084 100644
--- a/core/src/test/scala/org/apache/spark/util/random/XORShiftRandomSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/random/XORShiftRandomSuite.scala
@@ -19,6 +19,7 @@ package org.apache.spark.util.random
 
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
+
 import org.apache.spark.util.Utils.times
 
 class XORShiftRandomSuite extends FunSuite with ShouldMatchers {