diff --git a/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
index a9652d7e7d0b0f555f043897556dab0ec35b14d2..53102b9f1c93616d5c1ea4943411c167f835aa87 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
@@ -212,14 +212,16 @@ class SparkListenerSuite extends SparkFunSuite with LocalSparkContext with Match
       i
     }
 
-    val d = sc.parallelize(0 to 1e4.toInt, 64).map(w)
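+    // numSlices drives both the partition count here and the expected shuffle block counts below.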
+    val numSlices = 16
+    val d = sc.parallelize(0 to 1e3.toInt, numSlices).map(w)
     d.count()
     sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
     listener.stageInfos.size should be (1)
 
     val d2 = d.map { i => w(i) -> i * 2 }.setName("shuffle input 1")
     val d3 = d.map { i => w(i) -> (0 to (i % 5)) }.setName("shuffle input 2")
-    val d4 = d2.cogroup(d3, 64).map { case (k, (v1, v2)) =>
+    val d4 = d2.cogroup(d3, numSlices).map { case (k, (v1, v2)) =>
       w(k) -> (v1.size, v2.size)
     }
     d4.setName("A Cogroup")
@@ -258,8 +260,9 @@ class SparkListenerSuite extends SparkFunSuite with LocalSparkContext with Match
         if (stageInfo.rddInfos.exists(_.name == d4.name)) {
           taskMetrics.shuffleReadMetrics should be ('defined)
           val sm = taskMetrics.shuffleReadMetrics.get
-          sm.totalBlocksFetched should be (128)
-          sm.localBlocksFetched should be (128)
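+          // Each reduce task fetches one block per map partition from each of the two
+          // cogrouped shuffles (d2 and d3), and every fetch is local in this suite.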
+          sm.totalBlocksFetched should be (2 * numSlices)
+          sm.localBlocksFetched should be (2 * numSlices)
           sm.remoteBlocksFetched should be (0)
           sm.remoteBytesRead should be (0L)
         }