diff --git a/core/src/test/scala/org/apache/spark/storage/FlatmapIteratorSuite.scala b/core/src/test/scala/org/apache/spark/storage/FlatmapIteratorSuite.scala
index b843b4c629e84de946ca41d89c4637cdf1338afc..bcf138b5ee6d09f4fc15949e62e84e630d530413 100644
--- a/core/src/test/scala/org/apache/spark/storage/FlatmapIteratorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/FlatmapIteratorSuite.scala
@@ -33,34 +33,29 @@ class FlatmapIteratorSuite extends FunSuite with LocalSparkContext {
    * info from the serializer, and allow old objects to be GC'd
    */
   test("Flatmap Iterator to Disk") {
-    val sconf = new SparkConf().setMaster("local-cluster[1,1,512]")
-      .setAppName("iterator_to_disk_test")
+    val sconf = new SparkConf().setMaster("local").setAppName("iterator_to_disk_test")
     sc = new SparkContext(sconf)
     val expand_size = 100
     val data = sc.parallelize((1 to 5).toSeq).
       flatMap( x => Stream.range(0, expand_size))
     var persisted = data.persist(StorageLevel.DISK_ONLY)
-    println(persisted.count())
     assert(persisted.count()===500)
     assert(persisted.filter(_==1).count()===5)
   }

   test("Flatmap Iterator to Memory") {
-    val sconf = new SparkConf().setMaster("local-cluster[1,1,512]")
-      .setAppName("iterator_to_disk_test")
+    val sconf = new SparkConf().setMaster("local").setAppName("iterator_to_disk_test")
     sc = new SparkContext(sconf)
     val expand_size = 100
     val data = sc.parallelize((1 to 5).toSeq).
       flatMap(x => Stream.range(0, expand_size))
     var persisted = data.persist(StorageLevel.MEMORY_ONLY)
-    println(persisted.count())
     assert(persisted.count()===500)
     assert(persisted.filter(_==1).count()===5)
   }

   test("Serializer Reset") {
-    val sconf = new SparkConf().setMaster("local-cluster[1,1,512]")
-      .setAppName("serializer_reset_test")
+    val sconf = new SparkConf().setMaster("local").setAppName("serializer_reset_test")
       .set("spark.serializer.objectStreamReset", "10")
     sc = new SparkContext(sconf)
     val expand_size = 500