Commit cf3e9fd8 authored by Josh Rosen, committed by Matei Zaharia

[SPARK-1458] [PySpark] Expose sc.version in Java and PySpark

Author: Josh Rosen <joshrosen@apache.org>

Closes #1596 from JoshRosen/spark-1458 and squashes the following commits:

fdbb0bf [Josh Rosen] Add SparkContext.version to Python & Java [SPARK-1458]
parent 89047912
@@ -112,6 +112,9 @@ class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWork
   def startTime: java.lang.Long = sc.startTime
 
+  /** The version of Spark on which this application is running. */
+  def version: String = sc.version
+
   /** Default level of parallelism to use when not given by user (e.g. parallelize and makeRDD). */
   def defaultParallelism: java.lang.Integer = sc.defaultParallelism
@@ -216,6 +216,13 @@ class SparkContext(object):
         SparkContext._ensure_initialized()
         SparkContext._jvm.java.lang.System.setProperty(key, value)
 
+    @property
+    def version(self):
+        """
+        The version of Spark on which this application is running.
+        """
+        return self._jsc.version()
+
     @property
     def defaultParallelism(self):
         """
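For context, a minimal usage sketch (not part of the commit) showing how the new property can be read from PySpark once this change is applied, assuming a local SparkContext:

from pyspark import SparkContext

# Sketch only: start a local context and read the Spark version through
# the `version` property added in this commit.
sc = SparkContext("local", "version-check")
print(sc.version)  # prints the version string of the running Spark build
sc.stop()

On the Java side, the equivalent call is JavaSparkContext.version(), which simply delegates to SparkContext.version.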