diff --git a/examples/src/main/java/spark/examples/JavaKMeans.java b/examples/src/main/java/spark/examples/JavaKMeans.java
index 2d7ba6aecee0237049a5628688a486da78c8bd03..626034eb0d4d3a59f49a45e0a71d90896a095da1 100644
--- a/examples/src/main/java/spark/examples/JavaKMeans.java
+++ b/examples/src/main/java/spark/examples/JavaKMeans.java
@@ -16,99 +16,99 @@ import java.util.Map;
  */
 public class JavaKMeans {
 
-    /** Parses numbers split by whitespace to a vector */
-    static Vector parseVector(String line) {
-        String[] splits = line.split(" ");
-        double[] data = new double[splits.length];
-        int i = 0;
-        for (String s : splits)
-            data[i] = Double.parseDouble(splits[i++]);
-        return new Vector(data);
+  /** Parses numbers split by whitespace to a vector */
+  static Vector parseVector(String line) {
+    String[] splits = line.split(" ");
+    double[] data = new double[splits.length];
+    int i = 0;
+    for (String s : splits)
+      data[i] = Double.parseDouble(splits[i++]);
+    return new Vector(data);
+  }
+
+  /** Computes the vector to which the input vector is closest using squared distance */
+  static int closestPoint(Vector p, List<Vector> centers) {
+    int bestIndex = 0;
+    double closest = Double.POSITIVE_INFINITY;
+    for (int i = 0; i < centers.size(); i++) {
+      double tempDist = p.squaredDist(centers.get(i));
+      if (tempDist < closest) {
+        closest = tempDist;
+        bestIndex = i;
+      }
     }
+    return bestIndex;
+  }
 
-    /** Computes the vector to which the input vector is closest using squared distance */
-    static int closestPoint(Vector p, List<Vector> centers) {
-        int bestIndex = 0;
-        double closest = Double.POSITIVE_INFINITY;
-        for (int i = 0; i < centers.size(); i++) {
-            double tempDist = p.squaredDist(centers.get(i));
-            if (tempDist < closest) {
-                closest = tempDist;
-                bestIndex = i;
-            }
-        }
-        return bestIndex;
+  /** Computes the mean across all vectors in the input set of vectors */
+  static Vector average(List<Vector> ps) {
+    int numVectors = ps.size();
+    Vector out = new Vector(ps.get(0).elements());
+    // start from i = 1 since we already copied index 0 above
+    for (int i = 1; i < numVectors; i++) {
+      out.addInPlace(ps.get(i));
     }
+    return out.divide(numVectors);
+  }
 
-    /** Computes the mean across all vectors in the input set of vectors */
-    static Vector average(List<Vector> ps) {
-        int numVectors = ps.size();
-        Vector out = new Vector(ps.get(0).elements());
-        // start from i = 1 since we already copied index 0 above
-        for (int i = 1; i < numVectors; i++) {
-            out.addInPlace(ps.get(i));
-        }
-        return out.divide(numVectors);
+  public static void main(String[] args) throws Exception {
+    if (args.length < 4) {
+      System.err.println("Usage: JavaKMeans <master> <file> <k> <convergeDist>");
+      System.exit(1);
     }
+    JavaSparkContext sc = new JavaSparkContext(args[0], "JavaKMeans",
+      System.getenv("SPARK_HOME"), System.getenv("SPARK_EXAMPLES_JAR"));
+    String path = args[1];
+    int K = Integer.parseInt(args[2]);
+    double convergeDist = Double.parseDouble(args[3]);
 
-    public static void main(String[] args) throws Exception {
-        if (args.length < 4) {
-            System.err.println("Usage: JavaKMeans <master> <file> <k> <convergeDist>");
-            System.exit(1);
+    JavaRDD<Vector> data = sc.textFile(path).map(
+      new Function<String, Vector>() {
+        @Override
+        public Vector call(String line) throws Exception {
+          return parseVector(line);
         }
-        JavaSparkContext sc = new JavaSparkContext(args[0], "JavaKMeans",
-                System.getenv("SPARK_HOME"), System.getenv("SPARK_EXAMPLES_JAR"));
-        String path = args[1];
-        int K = Integer.parseInt(args[2]);
-        double convergeDist = Double.parseDouble(args[3]);
+      }
+    ).cache();
 
-        JavaRDD<Vector> data = sc.textFile(path).map(
-                new Function<String, Vector>() {
-                    @Override
-                    public Vector call(String line) throws Exception {
-                        return parseVector(line);
-                    }
-                }
-        ).cache();
+    final List<Vector> centroids = data.takeSample(false, K, 42);
 
-        final List<Vector> centroids = data.takeSample(false, K, 42);
-
-        double tempDist;
-        do {
-            // allocate each vector to closest centroid
-            JavaPairRDD<Integer, Vector> closest = data.map(
-                    new PairFunction<Vector, Integer, Vector>() {
-                        @Override
-                        public Tuple2<Integer, Vector> call(Vector vector) throws Exception {
-                            return new Tuple2<Integer, Vector>(
-                                    closestPoint(vector, centroids), vector);
-                        }
-                    }
-            );
+    double tempDist;
+    do {
+      // allocate each vector to closest centroid
+      JavaPairRDD<Integer, Vector> closest = data.map(
+        new PairFunction<Vector, Integer, Vector>() {
+          @Override
+          public Tuple2<Integer, Vector> call(Vector vector) throws Exception {
+            return new Tuple2<Integer, Vector>(
+              closestPoint(vector, centroids), vector);
+          }
+        }
+      );
 
-            // group by cluster id and average the vectors within each cluster to compute centroids
-            JavaPairRDD<Integer, List<Vector>> pointsGroup = closest.groupByKey();
-            Map<Integer, Vector> newCentroids = pointsGroup.mapValues(
-                    new Function<List<Vector>, Vector>() {
-                        public Vector call(List<Vector> ps) throws Exception {
-                            return average(ps);
-                        }
-                    }).collectAsMap();
-            tempDist = 0.0;
-            for (int i = 0; i < K; i++) {
-                tempDist += centroids.get(i).squaredDist(newCentroids.get(i));
-            }
-            for (Map.Entry<Integer, Vector> t: newCentroids.entrySet()) {
-                centroids.set(t.getKey(), t.getValue());
-            }
-            System.out.println("Finished iteration (delta = " + tempDist + ")");
-        } while (tempDist > convergeDist);
+      // group by cluster id and average the vectors within each cluster to compute centroids
+      JavaPairRDD<Integer, List<Vector>> pointsGroup = closest.groupByKey();
+      Map<Integer, Vector> newCentroids = pointsGroup.mapValues(
+        new Function<List<Vector>, Vector>() {
+          public Vector call(List<Vector> ps) throws Exception {
+            return average(ps);
+          }
+        }).collectAsMap();
+      tempDist = 0.0;
+      for (int i = 0; i < K; i++) {
+        tempDist += centroids.get(i).squaredDist(newCentroids.get(i));
+      }
+      for (Map.Entry<Integer, Vector> t: newCentroids.entrySet()) {
+        centroids.set(t.getKey(), t.getValue());
+      }
+      System.out.println("Finished iteration (delta = " + tempDist + ")");
+    } while (tempDist > convergeDist);
 
-        System.out.println("Final centers:");
-        for (Vector c : centroids)
-            System.out.println(c);
+    System.out.println("Final centers:");
+    for (Vector c : centroids)
+      System.out.println(c);
 
-        System.exit(0);
+    System.exit(0);
 
-}
+  }
 }
diff --git a/examples/src/main/java/spark/examples/JavaLogQuery.java b/examples/src/main/java/spark/examples/JavaLogQuery.java
index 40f33aaa735d140d19b479e8d7d5378c210f5851..6b22e7120c9174ccc602482aac062383d748ebe2 100644
--- a/examples/src/main/java/spark/examples/JavaLogQuery.java
+++ b/examples/src/main/java/spark/examples/JavaLogQuery.java
@@ -20,96 +20,95 @@ import java.util.regex.Pattern;
  */
 public class JavaLogQuery {
 
-    public static List<String> exampleApacheLogs = Lists.newArrayList(
-            "10.10.10.10 - \"FRED\" [18/Jan/2013:17:56:07 +1100] \"GET http://images.com/2013/Generic.jpg " +
-            "HTTP/1.1\" 304 315 \"http://referall.com/\" \"Mozilla/4.0 (compatible; MSIE 7.0; " +
-            "Windows NT 5.1; GTB7.4; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30; .NET CLR 3.0.04506.648; " +
-            ".NET CLR 3.5.21022; .NET CLR 3.0.4506.2152; .NET CLR 1.0.3705; .NET CLR 1.1.4322; .NET CLR " +
-            "3.5.30729; Release=ARP)\" \"UD-1\" - \"image/jpeg\" \"whatever\" 0.350 \"-\" - \"\" 265 923 934 \"\" " +
-            "62.24.11.25 images.com 1358492167 - Whatup",
-            "10.10.10.10 - \"FRED\" [18/Jan/2013:18:02:37 +1100] \"GET http://images.com/2013/Generic.jpg " +
-            "HTTP/1.1\" 304 306 \"http:/referall.com\" \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; " +
-            "GTB7.4; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30; .NET CLR 3.0.04506.648; .NET CLR " +
-            "3.5.21022; .NET CLR 3.0.4506.2152; .NET CLR 1.0.3705; .NET CLR 1.1.4322; .NET CLR  " +
-            "3.5.30729; Release=ARP)\" \"UD-1\" - \"image/jpeg\" \"whatever\" 0.352 \"-\" - \"\" 256 977 988 \"\" " +
-            "0 73.23.2.15 images.com 1358492557 - Whatup");
-
-
-    public static Pattern apacheLogRegex = Pattern.compile(
-        "^([\\d.]+) (\\S+) (\\S+) \\[([\\w\\d:/]+\\s[+\\-]\\d{4})\\] \"(.+?)\" (\\d{3}) ([\\d\\-]+) \"([^\"]+)\" \"([^\"]+)\".*");
-
-    /** Tracks the total query count and number of aggregate bytes for a particular group. */
-    public static class Stats implements Serializable {
-
-        private int count;
-        private int numBytes;
-
-        public Stats(int count, int numBytes) {
-            this.count = count;
-            this.numBytes = numBytes;
-        }
-        public Stats merge(Stats other) {
-            return new Stats(count + other.count, numBytes + other.numBytes);
-        }
-
-        public String toString() {
-            return String.format("bytes=%s\tn=%s", numBytes, count);
-        }
+  public static List<String> exampleApacheLogs = Lists.newArrayList(
+    "10.10.10.10 - \"FRED\" [18/Jan/2013:17:56:07 +1100] \"GET http://images.com/2013/Generic.jpg " +
+      "HTTP/1.1\" 304 315 \"http://referall.com/\" \"Mozilla/4.0 (compatible; MSIE 7.0; " +
+      "Windows NT 5.1; GTB7.4; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30; .NET CLR 3.0.04506.648; " +
+      ".NET CLR 3.5.21022; .NET CLR 3.0.4506.2152; .NET CLR 1.0.3705; .NET CLR 1.1.4322; .NET CLR " +
+      "3.5.30729; Release=ARP)\" \"UD-1\" - \"image/jpeg\" \"whatever\" 0.350 \"-\" - \"\" 265 923 934 \"\" " +
+      "62.24.11.25 images.com 1358492167 - Whatup",
+    "10.10.10.10 - \"FRED\" [18/Jan/2013:18:02:37 +1100] \"GET http://images.com/2013/Generic.jpg " +
+      "HTTP/1.1\" 304 306 \"http:/referall.com\" \"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; " +
+      "GTB7.4; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30; .NET CLR 3.0.04506.648; .NET CLR " +
+      "3.5.21022; .NET CLR 3.0.4506.2152; .NET CLR 1.0.3705; .NET CLR 1.1.4322; .NET CLR  " +
+      "3.5.30729; Release=ARP)\" \"UD-1\" - \"image/jpeg\" \"whatever\" 0.352 \"-\" - \"\" 256 977 988 \"\" " +
+      "0 73.23.2.15 images.com 1358492557 - Whatup");
+
+  public static Pattern apacheLogRegex = Pattern.compile(
+    "^([\\d.]+) (\\S+) (\\S+) \\[([\\w\\d:/]+\\s[+\\-]\\d{4})\\] \"(.+?)\" (\\d{3}) ([\\d\\-]+) \"([^\"]+)\" \"([^\"]+)\".*");
+
+  /** Tracks the total query count and number of aggregate bytes for a particular group. */
+  public static class Stats implements Serializable {
+
+    private int count;
+    private int numBytes;
+
+    public Stats(int count, int numBytes) {
+      this.count = count;
+      this.numBytes = numBytes;
     }
-
-    public static Tuple3<String, String, String> extractKey(String line) {
-        Matcher m = apacheLogRegex.matcher(line);
-        List<String> key = Collections.emptyList();
-        if (m.find()) {
-            String ip = m.group(1);
-            String user = m.group(3);
-            String query = m.group(5);
-            if (!user.equalsIgnoreCase("-")) {
-                return new Tuple3<String, String, String>(ip, user, query);
-            }
-        }
-        return new Tuple3<String, String, String>(null, null, null);
+    public Stats merge(Stats other) {
+      return new Stats(count + other.count, numBytes + other.numBytes);
     }
 
-    public static Stats extractStats(String line) {
-        Matcher m = apacheLogRegex.matcher(line);
-        if (m.find()) {
-            int bytes = Integer.parseInt(m.group(7));
-            return new Stats(1, bytes);
-        }
-        else
-            return new Stats(1, 0);
+    public String toString() {
+      return String.format("bytes=%s\tn=%s", numBytes, count);
+    }
+  }
+
+  public static Tuple3<String, String, String> extractKey(String line) {
+    Matcher m = apacheLogRegex.matcher(line);
+    List<String> key = Collections.emptyList();
+    if (m.find()) {
+      String ip = m.group(1);
+      String user = m.group(3);
+      String query = m.group(5);
+      if (!user.equalsIgnoreCase("-")) {
+        return new Tuple3<String, String, String>(ip, user, query);
+      }
+    }
+    return new Tuple3<String, String, String>(null, null, null);
+  }
+
+  public static Stats extractStats(String line) {
+    Matcher m = apacheLogRegex.matcher(line);
+    if (m.find()) {
+      int bytes = Integer.parseInt(m.group(7));
+      return new Stats(1, bytes);
+    }
+    else
+      return new Stats(1, 0);
+  }
+
+  public static void main(String[] args) throws Exception {
+    if (args.length == 0) {
+      System.err.println("Usage: JavaLogQuery <master> [logFile]");
+      System.exit(1);
     }
 
-    public static void main(String[] args) throws Exception {
-        if (args.length == 0) {
-            System.err.println("Usage: JavaLogQuery <master> [logFile]");
-            System.exit(1);
-        }
-
-        JavaSparkContext jsc = new JavaSparkContext(args[0], "JavaLogQuery",
-               System.getenv("SPARK_HOME"), System.getenv("SPARK_EXAMPLES_JAR"));
-
-        JavaRDD<String> dataSet = (args.length == 2) ? jsc.textFile(args[1]) : jsc.parallelize(exampleApacheLogs);
-
-        JavaPairRDD<Tuple3<String, String, String>, Stats> extracted = dataSet.map(new PairFunction<String, Tuple3<String, String, String>, Stats>() {
-            @Override
-            public Tuple2<Tuple3<String, String, String>, Stats> call(String s) throws Exception {
-                return new Tuple2<Tuple3<String, String, String>, Stats>(extractKey(s), extractStats(s));
-            }
-        });
-
-        JavaPairRDD<Tuple3<String, String, String>, Stats> counts = extracted.reduceByKey(new Function2<Stats, Stats, Stats>() {
-            @Override
-            public Stats call(Stats stats, Stats stats2) throws Exception {
-                return stats.merge(stats2);
-            }
-        });
-
-        List<Tuple2<Tuple3<String, String, String>, Stats>> output = counts.collect();
-        for (Tuple2 t : output) {
-            System.out.println(t._1 + "\t" + t._2);
-        }
-        System.exit(0);
+    JavaSparkContext jsc = new JavaSparkContext(args[0], "JavaLogQuery",
+      System.getenv("SPARK_HOME"), System.getenv("SPARK_EXAMPLES_JAR"));
+
+    JavaRDD<String> dataSet = (args.length == 2) ? jsc.textFile(args[1]) : jsc.parallelize(exampleApacheLogs);
+
+    JavaPairRDD<Tuple3<String, String, String>, Stats> extracted = dataSet.map(new PairFunction<String, Tuple3<String, String, String>, Stats>() {
+      @Override
+      public Tuple2<Tuple3<String, String, String>, Stats> call(String s) throws Exception {
+        return new Tuple2<Tuple3<String, String, String>, Stats>(extractKey(s), extractStats(s));
+      }
+    });
+
+    JavaPairRDD<Tuple3<String, String, String>, Stats> counts = extracted.reduceByKey(new Function2<Stats, Stats, Stats>() {
+      @Override
+      public Stats call(Stats stats, Stats stats2) throws Exception {
+        return stats.merge(stats2);
+      }
+    });
+
+    List<Tuple2<Tuple3<String, String, String>, Stats>> output = counts.collect();
+    for (Tuple2 t : output) {
+      System.out.println(t._1 + "\t" + t._2);
     }
+    System.exit(0);
+  }
 }
diff --git a/examples/src/main/java/spark/examples/JavaSparkPi.java b/examples/src/main/java/spark/examples/JavaSparkPi.java
index e4cee97a42e0fc271f47fda4840df28b62515192..a15a967de85dfc505877f5a9d79ec30a481a505e 100644
--- a/examples/src/main/java/spark/examples/JavaSparkPi.java
+++ b/examples/src/main/java/spark/examples/JavaSparkPi.java
@@ -11,37 +11,38 @@ import java.util.List;
 /** Computes an approximation to pi */
 public class JavaSparkPi {
 
-    public static void main(String[] args) throws Exception {
-        if (args.length == 0) {
-            System.err.println("Usage: JavaLogQuery <master> [slices]");
-            System.exit(1);
-        }
-
-        JavaSparkContext jsc = new JavaSparkContext(args[0], "JavaLogQuery",
-                System.getenv("SPARK_HOME"), System.getenv("SPARK_EXAMPLES_JAR"));
-
-        int slices = (args.length == 2) ? Integer.parseInt(args[1]) : 2;
-        int n = 100000 * slices;
-        List<Integer> l = new ArrayList<Integer>(n);
-        for (int i = 0; i < n; i++)
-            l.add(i);
-
-        JavaRDD<Integer> dataSet = jsc.parallelize(l, slices);
-
-        int count = dataSet.map(new Function<Integer, Integer>() {
-            @Override
-            public Integer call(Integer integer) throws Exception {
-                double x = Math.random() * 2 - 1;
-                double y = Math.random() * 2 - 1;
-                return (x * x + y * y < 1) ? 1 : 0;
-            }
-        }).reduce(new Function2<Integer, Integer, Integer>() {
-            @Override
-            public Integer call(Integer integer, Integer integer2) throws Exception {
-                return integer + integer2;
-            }
-        });
-
-        System.out.println("Pi is roughly " + 4.0 * count / n);
+
+  public static void main(String[] args) throws Exception {
+    if (args.length == 0) {
+      System.err.println("Usage: JavaSparkPi <master> [slices]");
+      System.exit(1);
     }
+
+    JavaSparkContext jsc = new JavaSparkContext(args[0], "JavaSparkPi",
+      System.getenv("SPARK_HOME"), System.getenv("SPARK_EXAMPLES_JAR"));
+
+    int slices = (args.length == 2) ? Integer.parseInt(args[1]) : 2;
+    int n = 100000 * slices;
+    List<Integer> l = new ArrayList<Integer>(n);
+    for (int i = 0; i < n; i++)
+      l.add(i);
+
+    JavaRDD<Integer> dataSet = jsc.parallelize(l, slices);
+
+    int count = dataSet.map(new Function<Integer, Integer>() {
+      @Override
+      public Integer call(Integer integer) throws Exception {
+        double x = Math.random() * 2 - 1;
+        double y = Math.random() * 2 - 1;
+        return (x * x + y * y < 1) ? 1 : 0;
+      }
+    }).reduce(new Function2<Integer, Integer, Integer>() {
+      @Override
+      public Integer call(Integer integer, Integer integer2) throws Exception {
+        return integer + integer2;
+      }
+    });
+
+    System.out.println("Pi is roughly " + 4.0 * count / n);
+  }
 }