diff --git a/mllib/src/main/scala/org/apache/spark/mllib/rdd/RandomRDD.scala b/mllib/src/main/scala/org/apache/spark/mllib/rdd/RandomRDD.scala
index c8db3910c6eab23b621562f37eaeb87460203404..910eff9540a47d8fbd4f2158b72a33e3aac1040a 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/rdd/RandomRDD.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/rdd/RandomRDD.scala
@@ -105,16 +105,16 @@ private[mllib] object RandomRDD {
   def getPointIterator[T: ClassTag](partition: RandomRDDPartition[T]): Iterator[T] = {
     val generator = partition.generator.copy()
     generator.setSeed(partition.seed)
-    Array.fill(partition.size)(generator.nextValue()).toIterator
+    Iterator.fill(partition.size)(generator.nextValue())
   }
 
   // The RNG has to be reset every time the iterator is requested to guarantee same data
   // every time the content of the RDD is examined.
-  def getVectorIterator(partition: RandomRDDPartition[Double],
-                        vectorSize: Int): Iterator[Vector] = {
+  def getVectorIterator(
+      partition: RandomRDDPartition[Double],
+      vectorSize: Int): Iterator[Vector] = {
     val generator = partition.generator.copy()
     generator.setSeed(partition.seed)
-    Array.fill(partition.size)(new DenseVector(
-      (0 until vectorSize).map { _ => generator.nextValue() }.toArray)).toIterator
+    Iterator.fill(partition.size)(new DenseVector(Array.fill(vectorSize)(generator.nextValue())))
   }
 }
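
Why this is an improvement (a minimal standalone sketch, not part of the patch): Array.fill(n)(f) evaluates f eagerly n times and holds all n results in memory before the .toIterator is ever consumed, whereas Iterator.fill(n)(f) defers each call to f until an element is actually requested. The snippet below uses a hypothetical counter in place of the partition's random generator purely to illustrate the difference.

object LazyFillSketch {
  def main(args: Array[String]): Unit = {
    var eagerCalls = 0
    // Mirrors the removed pattern: every element is generated up front.
    val eager = Array.fill(5) { eagerCalls += 1; eagerCalls }.toIterator
    println(s"calls after Array.fill: $eagerCalls")             // 5

    var lazyCalls = 0
    // Mirrors the new pattern: nothing is generated until consumption.
    val deferred = Iterator.fill(5) { lazyCalls += 1; lazyCalls }
    println(s"calls after Iterator.fill: $lazyCalls")           // 0
    deferred.take(2).foreach(_ => ())
    println(s"calls after consuming two elements: $lazyCalls")  // 2
  }
}

For a RandomRDD partition this means getPointIterator and getVectorIterator no longer materialize partition.size values (or partition.size dense vectors) in an intermediate Array just to hand Spark an Iterator; determinism is unaffected because the copied generator is still re-seeded on every call, as the existing comment in the patch notes.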