From 39c836e976fcae51568bed5ebab28e148383b5d4 Mon Sep 17 00:00:00 2001
From: Liwei Lin <lwlin7@gmail.com>
Date: Wed, 13 Jul 2016 22:30:46 -0700
Subject: [PATCH] [SPARK-16503] SparkSession should provide Spark version

## What changes were proposed in this pull request?

This patch enables SparkSession to provide spark version.

## How was this patch tested?

Manual test:

```
scala> sc.version
res0: String = 2.1.0-SNAPSHOT

scala> spark.version
res1: String = 2.1.0-SNAPSHOT
```

```
>>> sc.version
u'2.1.0-SNAPSHOT'
>>> spark.version
u'2.1.0-SNAPSHOT'
```

Author: Liwei Lin <lwlin7@gmail.com>

Closes #14165 from lw-lin/add-version.
---
 python/pyspark/sql/session.py                             | 6 ++++++
 .../main/scala/org/apache/spark/sql/SparkSession.scala    | 9 ++++++++-
 2 files changed, 14 insertions(+), 1 deletion(-)

diff --git a/python/pyspark/sql/session.py b/python/pyspark/sql/session.py
index a360fbefa4..594f9375f7 100644
--- a/python/pyspark/sql/session.py
+++ b/python/pyspark/sql/session.py
@@ -232,6 +232,12 @@ class SparkSession(object):
         """Returns the underlying :class:`SparkContext`."""
         return self._sc
 
+    @property
+    @since(2.0)
+    def version(self):
+        """The version of Spark on which this application is running."""
+        return self._jsparkSession.version()
+
     @property
     @since(2.0)
     def conf(self):
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 1271d1c55b..1a40b7e2c1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -25,7 +25,7 @@ import scala.reflect.ClassTag
 import scala.reflect.runtime.universe.TypeTag
 import scala.util.control.NonFatal
 
-import org.apache.spark.{SparkConf, SparkContext}
+import org.apache.spark.{SPARK_VERSION, SparkConf, SparkContext}
 import org.apache.spark.annotation.{DeveloperApi, Experimental}
 import org.apache.spark.api.java.JavaRDD
 import org.apache.spark.internal.Logging
@@ -79,6 +79,13 @@ class SparkSession private(
 
   sparkContext.assertNotStopped()
 
+  /**
+   * The version of Spark on which this application is running.
+   *
+   * @since 2.0.0
+   */
+  def version: String = SPARK_VERSION
+
   /* ----------------------- *
    |  Session-related state  |
    * ----------------------- */
--
GitLab
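
For reference beyond the patch itself, below is a minimal usage sketch of the new `version` property from a standalone Scala application. The `VersionCheck` object, the `local[*]` master, and the application name are illustrative assumptions and not part of this change; the Scala property simply exposes `SPARK_VERSION`, so it should agree with `SparkContext.version`.

```
// Minimal usage sketch (not part of the patch): assumes a Spark build that
// includes this change and that a local master is acceptable for testing.
import org.apache.spark.sql.SparkSession

object VersionCheck {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")          // assumption: run locally for the check
      .appName("version-check")    // hypothetical application name
      .getOrCreate()

    // SparkSession.version exposes SPARK_VERSION, so it should match
    // the version reported by the underlying SparkContext.
    println(s"spark.version        = ${spark.version}")
    println(s"sparkContext.version = ${spark.sparkContext.version}")

    spark.stop()
  }
}
```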