From cf3e9fd84dc64f8a57ecbcfdd6b22f5492d41bd7 Mon Sep 17 00:00:00 2001
From: Josh Rosen <joshrosen@apache.org>
Date: Sat, 26 Jul 2014 00:54:05 -0700
Subject: [PATCH] [SPARK-1458] [PySpark] Expose sc.version in Java and PySpark

Author: Josh Rosen <joshrosen@apache.org>

Closes #1596 from JoshRosen/spark-1458 and squashes the following commits:

fdbb0bf [Josh Rosen] Add SparkContext.version to Python & Java [SPARK-1458]
---
 .../scala/org/apache/spark/api/java/JavaSparkContext.scala | 3 +++
 python/pyspark/context.py                                  | 7 +++++++
 2 files changed, 10 insertions(+)

diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
index 1e0493c485..a678355a1c 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
@@ -112,6 +112,9 @@ class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWork
 
   def startTime: java.lang.Long = sc.startTime
 
+  /** The version of Spark on which this application is running. */
+  def version: String = sc.version
+
   /** Default level of parallelism to use when not given by user (e.g. parallelize and makeRDD). */
   def defaultParallelism: java.lang.Integer = sc.defaultParallelism
 
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index 024fb88187..bdf14ea0ee 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -216,6 +216,13 @@ class SparkContext(object):
         SparkContext._ensure_initialized()
         SparkContext._jvm.java.lang.System.setProperty(key, value)
 
+    @property
+    def version(self):
+        """
+        The version of Spark on which this application is running.
+        """
+        return self._jsc.version()
+
     @property
     def defaultParallelism(self):
         """
-- 
GitLab
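
A minimal usage sketch of the new PySpark property added by this patch (the "local" master and app name here are arbitrary illustration choices, not part of the patch):

    from pyspark import SparkContext

    # Create a context against a local master; any master URL works.
    sc = SparkContext("local", "version-check")

    # The new property delegates to the JVM SparkContext via the Java gateway
    # (self._jsc.version()), so it reports the version of the running Spark build.
    print(sc.version)

    sc.stop()

The Java side is symmetric: JavaSparkContext.version() simply forwards to the underlying Scala SparkContext's version field, so both language bindings return the same string.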