diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
index cf30523ab523e7da3f4c8755e902b06476f2b3b0..bda9272b43393c564fef1c6dd6bd7290746d9148 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
@@ -114,7 +114,7 @@ class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWork
    * @deprecated As of Spark 1.0.0, defaultMinSplits is deprecated, use
    *            {@link #defaultMinPartitions()} instead
    */
-  @Deprecated
+  @deprecated("use defaultMinPartitions", "1.0.0")
   def defaultMinSplits: java.lang.Integer = sc.defaultMinSplits
 
   /** Default min number of partitions for Hadoop RDDs when not given by user */
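
Note: the hunk above swaps Java's @Deprecated annotation for Scala's @deprecated. On a Scala definition, Java's @Deprecated is retained in the bytecode but (at least on Scala 2.10, which Spark 1.x used) does not make scalac warn at call sites; Scala's @deprecated carries a message and a "since" version and triggers a compiler deprecation warning wherever the member is used. A minimal standalone sketch of the effect follows; DeprecationDemo and its method bodies are hypothetical and not part of this diff:

    // DeprecationDemo.scala -- compile with: scalac -deprecation DeprecationDemo.scala
    object DeprecationDemo {
      // Scala-style deprecation: a message plus the version it was deprecated in,
      // mirroring the annotation introduced by the diff above.
      @deprecated("use defaultMinPartitions", "1.0.0")
      def defaultMinSplits: Int = defaultMinPartitions

      def defaultMinPartitions: Int = 2

      def main(args: Array[String]): Unit = {
        // scalac reports a warning at this call site, along the lines of:
        //   method defaultMinSplits in object DeprecationDemo is deprecated
        //   (since 1.0.0): use defaultMinPartitions
        println(defaultMinSplits)
      }
    }

Because the annotation's message and since-version land directly in the compiler output, callers migrating off defaultMinSplits see the suggested replacement without consulting the scaladoc.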