From 5d0f58b2eb8e48a95c4ab34bc89f7251d093f301 Mon Sep 17 00:00:00 2001
From: Michael Armbrust <michael@databricks.com>
Date: Sat, 19 Apr 2014 15:06:04 -0700
Subject: [PATCH] Use scala deprecation instead of java.

This gets rid of a warning when compiling core (since we were depending on
a deprecated interface with a non-deprecated function). I also tested with
javac, and this does the right thing when compiling java code.

Author: Michael Armbrust <michael@databricks.com>

Closes #452 from marmbrus/scalaDeprecation and squashes the following commits:

f628b4d [Michael Armbrust] Use scala deprecation instead of java.
---
 .../main/scala/org/apache/spark/api/java/JavaSparkContext.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
index cf30523ab5..bda9272b43 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
@@ -114,7 +114,7 @@ class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWork
    * @deprecated As of Spark 1.0.0, defaultMinSplits is deprecated, use
    *             {@link #defaultMinPartitions()} instead
    */
-  @Deprecated
+  @deprecated("use defaultMinPartitions", "1.0.0")
   def defaultMinSplits: java.lang.Integer = sc.defaultMinSplits
 
   /** Default min number of partitions for Hadoop RDDs when not given by user */
--
GitLab
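
Context on why the annotation swap silences the warning: scalac suppresses
deprecation warnings for calls made from inside a member that itself carries
Scala's @deprecated annotation, while Java's @Deprecated does not get that
treatment, so the wrapper method kept warning about its call to the deprecated
sc.defaultMinSplits. The sketch below illustrates this behavior; the names
(Old, Wrapper, oldThing, and the deprecation messages) are hypothetical and
are not part of the patch.

object Old {
  @deprecated("use newThing instead", "1.0.0")
  def oldThing: Int = 42
}

object Wrapper {
  // Still warns: java.lang.Deprecated marks the method as deprecated for
  // javac, but scalac nonetheless reports the call to deprecated Old.oldThing.
  @Deprecated
  def viaJavaAnnotation: Int = Old.oldThing

  // Compiles cleanly: because the enclosing member carries Scala's
  // @deprecated, scalac suppresses the warning for the call to Old.oldThing.
  @deprecated("use something newer instead", "1.0.0")
  def viaScalaAnnotation: Int = Old.oldThing
}

Per the commit message, the Scala annotation also "does the right thing when
compiling java code", i.e. the member is still flagged as deprecated in the
emitted class file, so javac users see the same deprecation as before.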