diff --git a/core/src/test/scala/org/apache/spark/DistributedSuite.scala b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
index 2110d3d770d5d4f22531900e68d8ffa6856afb4d..a0086e18435b6e39bd54426e1876063e6ba8a0f1 100644
--- a/core/src/test/scala/org/apache/spark/DistributedSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
@@ -51,18 +51,23 @@ class DistributedSuite extends SparkFunSuite with Matchers with LocalSparkContex
   }
 
   test("local-cluster format") {
-    sc = new SparkContext("local-cluster[2,1,1024]", "test")
-    assert(sc.parallelize(1 to 2, 2).count() == 2)
-    resetSparkContext()
-    sc = new SparkContext("local-cluster[2 , 1 , 1024]", "test")
-    assert(sc.parallelize(1 to 2, 2).count() == 2)
-    resetSparkContext()
-    sc = new SparkContext("local-cluster[2, 1, 1024]", "test")
-    assert(sc.parallelize(1 to 2, 2).count() == 2)
-    resetSparkContext()
-    sc = new SparkContext("local-cluster[ 2, 1, 1024 ]", "test")
-    assert(sc.parallelize(1 to 2, 2).count() == 2)
-    resetSparkContext()
+    import SparkMasterRegex._
+
+    val masterStrings = Seq(
+      "local-cluster[2,1,1024]",
+      "local-cluster[2 , 1 , 1024]",
+      "local-cluster[2, 1, 1024]",
+      "local-cluster[ 2, 1, 1024 ]"
+    )
+
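+    // Each master string must be fully matched by the whitespace-tolerant
+    // regex; an entry that fails to match throws MatchError and fails the test.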
+    masterStrings.foreach {
+      case LOCAL_CLUSTER_REGEX(numSlaves, coresPerSlave, memoryPerSlave) =>
+        assert(numSlaves.toInt == 2)
+        assert(coresPerSlave.toInt == 1)
+        assert(memoryPerSlave.toInt == 1024)
+    }
   }
 
   test("simple groupByKey") {