diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
index 0103f6c6ab6781df36bc1fc71c0e6102c2eb4bab..55a37f8c944b2091c444d60685b09352a754c052 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
@@ -425,11 +425,6 @@ private[spark] object PythonRDD extends Logging {
     iter.foreach(write)
   }

-  /** Create an RDD that has no partitions or elements. */
-  def emptyRDD[T](sc: JavaSparkContext): JavaRDD[T] = {
-    sc.emptyRDD[T]
-  }
-
   /**
    * Create an RDD from a path using [[org.apache.hadoop.mapred.SequenceFileInputFormat]],
    * key and value class.