diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
index 01433ca2efc1423406c6e9936e7a9d06a499a3af..f1aebbcd39638e151b44a34309c4cbf0a07d0949 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
@@ -774,6 +774,16 @@ class JavaSparkContext(val sc: SparkContext)
 
   /** Cancel all jobs that have been scheduled or are running. */
   def cancelAllJobs(): Unit = sc.cancelAllJobs()
+
+  /**
+   * Returns a Java map of JavaRDDs that have marked themselves as persistent via a cache() call.
+   * Note that this does not necessarily mean the caching or computation was successful.
+   */
+  def getPersistentRDDs: JMap[java.lang.Integer, JavaRDD[_]] = {
+    sc.getPersistentRDDs.mapValues(s => JavaRDD.fromRDD(s))
+      .asJava.asInstanceOf[JMap[java.lang.Integer, JavaRDD[_]]]
+  }
+
 }
 
 object JavaSparkContext {
diff --git a/core/src/test/java/org/apache/spark/JavaAPISuite.java b/core/src/test/java/org/apache/spark/JavaAPISuite.java
index 8117ad9e606419c58be0fea23e293d9b6e507466..e6a4ab7550c2a52c229dbdef88524ffadbd8499a 100644
--- a/core/src/test/java/org/apache/spark/JavaAPISuite.java
+++ b/core/src/test/java/org/apache/spark/JavaAPISuite.java
@@ -1811,4 +1811,16 @@ public class JavaAPISuite implements Serializable {
       conf.get("spark.kryo.classesToRegister"));
   }
 
+  @Test
+  public void testGetPersistentRDDs() {
+    java.util.Map<Integer, JavaRDD<?>> cachedRddsMap = sc.getPersistentRDDs();
+    Assert.assertTrue(cachedRddsMap.isEmpty());
+    JavaRDD<String> rdd1 = sc.parallelize(Arrays.asList("a", "b")).setName("RDD1").cache();
+    JavaRDD<String> rdd2 = sc.parallelize(Arrays.asList("c", "d")).setName("RDD2").cache();
+    cachedRddsMap = sc.getPersistentRDDs();
+    Assert.assertEquals(2, cachedRddsMap.size());
+    Assert.assertEquals("RDD1", cachedRddsMap.get(0).name());
+    Assert.assertEquals("RDD2", cachedRddsMap.get(1).name());
+  }
+
 }
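
Beyond the unit test above, a minimal sketch of how a Java application might use the new `getPersistentRDDs` API once this patch lands. The class name `PersistentRddsExample`, the RDD name, and the driver setup are illustrative, not part of the patch:

```java
import java.util.Arrays;
import java.util.Map;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

// Hypothetical driver program, for illustration only.
public class PersistentRddsExample {
  public static void main(String[] args) {
    SparkConf conf = new SparkConf().setMaster("local").setAppName("PersistentRddsExample");
    JavaSparkContext sc = new JavaSparkContext(conf);

    // cache() marks the RDD as persistent; it then shows up in getPersistentRDDs(),
    // keyed by its RDD id. As the new scaladoc notes, presence in the map only means
    // persistence was requested; the blocks are materialized lazily, on first computation.
    JavaRDD<String> words = sc.parallelize(Arrays.asList("a", "b", "c")).setName("words").cache();
    words.count();  // force computation so the partitions are actually cached

    // Inspect everything this application has marked as persistent.
    for (Map.Entry<Integer, JavaRDD<?>> entry : sc.getPersistentRDDs().entrySet()) {
      System.out.println("RDD " + entry.getKey() + ": " + entry.getValue().name());
    }

    sc.stop();
  }
}
```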