diff --git a/core/src/main/scala/spark/SparkContext.scala b/core/src/main/scala/spark/SparkContext.scala
index dbbe7a63d5531e74864f6c3c1f141188ea71d72d..f7cc614fc1d247a58b67b97a57ad639b82804a16 100644
--- a/core/src/main/scala/spark/SparkContext.scala
+++ b/core/src/main/scala/spark/SparkContext.scala
@@ -257,7 +257,7 @@ extends Logging {
   def defaultParallelism: Int = scheduler.defaultParallelism
 
   // Default min number of splits for Hadoop RDDs when not given by user
-  def defaultMinSplits: Int = Math.min(defaultParallelism, 2)
+  def defaultMinSplits: Int = math.min(defaultParallelism, 2)
 
   private var nextShuffleId = new AtomicInteger(0)
 