From 42dcdbcb2f230ea67caa70e3412dba67136c8654 Mon Sep 17 00:00:00 2001
From: Reynold Xin <reynoldx@gmail.com>
Date: Thu, 29 Mar 2012 15:21:57 -0700
Subject: [PATCH] Removed the extra spaces in OrderedRDDFunctions and SortedRDD.

---
 .../main/scala/spark/PairRDDFunctions.scala | 40 +++++++++----------
 1 file changed, 20 insertions(+), 20 deletions(-)

diff --git a/core/src/main/scala/spark/PairRDDFunctions.scala b/core/src/main/scala/spark/PairRDDFunctions.scala
index 295fe81ce6..4982a1aa15 100644
--- a/core/src/main/scala/spark/PairRDDFunctions.scala
+++ b/core/src/main/scala/spark/PairRDDFunctions.scala
@@ -359,28 +359,28 @@ class PairRDDFunctions[K: ClassManifest, V: ClassManifest](
   def getValueClass() = implicitly[ClassManifest[V]].erasure
 }
 
-  class OrderedRDDFunctions[K <% Ordered[K]: ClassManifest, V: ClassManifest](
-    self: RDD[(K, V)])
-    extends Logging
-    with Serializable {
+class OrderedRDDFunctions[K <% Ordered[K]: ClassManifest, V: ClassManifest](
+  self: RDD[(K, V)])
+  extends Logging
+  with Serializable {
 
-    def sortByKey(ascending: Boolean = true): RDD[(K,V)] = {
-      val rangePartitionedRDD = self.partitionBy(new RangePartitioner(self.splits.size, self, ascending))
-      new SortedRDD(rangePartitionedRDD, ascending)
-    }
+  def sortByKey(ascending: Boolean = true): RDD[(K,V)] = {
+    val rangePartitionedRDD = self.partitionBy(new RangePartitioner(self.splits.size, self, ascending))
+    new SortedRDD(rangePartitionedRDD, ascending)
   }
-
-  class SortedRDD[K <% Ordered[K], V](prev: RDD[(K, V)], ascending: Boolean)
-    extends RDD[(K, V)](prev.context) {
-
-    override def splits = prev.splits
-    override val partitioner = prev.partitioner
-    override val dependencies = List(new OneToOneDependency(prev))
-    override def compute(split: Split) = {
-      prev.iterator(split).toArray
-        .sortWith((x, y) => if (ascending) x._1 < y._1 else x._1 > y._1).iterator
-    }
-  }
+}
+
+class SortedRDD[K <% Ordered[K], V](prev: RDD[(K, V)], ascending: Boolean)
+  extends RDD[(K, V)](prev.context) {
+
+  override def splits = prev.splits
+  override val partitioner = prev.partitioner
+  override val dependencies = List(new OneToOneDependency(prev))
+  override def compute(split: Split) = {
+    prev.iterator(split).toArray
+      .sortWith((x, y) => if (ascending) x._1 < y._1 else x._1 > y._1).iterator
+  }
+}
 
 class MappedValuesRDD[K, V, U](prev: RDD[(K, V)], f: V => U) extends RDD[(K, U)](prev.context) {
   override def splits = prev.splits
-- 
GitLab
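
For context, the snippet below is a minimal sketch of how the now top-level OrderedRDDFunctions.sortByKey could be exercised from a driver program against this revision: sortByKey range-partitions the pair RDD, and the resulting SortedRDD sorts each partition, yielding a totally ordered RDD. The object name, the "local" master URL, the sample data, and the explicit wrapper construction (rather than relying on any implicit conversion) are illustrative assumptions, not part of the patch.

    import spark.{OrderedRDDFunctions, SparkContext}

    object SortByKeyExample {
      def main(args: Array[String]) {
        // Hypothetical local driver; master URL and app name are illustrative only.
        val sc = new SparkContext("local", "SortByKeyExample")

        val pairs = sc.parallelize(Seq(("b", 2), ("c", 3), ("a", 1)))

        // Wrap the pair RDD explicitly in the (now top-level) OrderedRDDFunctions.
        // sortByKey range-partitions the data, then SortedRDD sorts each partition.
        val sorted = new OrderedRDDFunctions(pairs).sortByKey()

        println(sorted.collect().mkString(", "))  // (a,1), (b,2), (c,3)
      }
    }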