diff --git a/python/pyspark/sql/session.py b/python/pyspark/sql/session.py
index a360fbefa492cac7ad99a6e978e011d880a1c55a..594f9375f76783e90dc83cb01e0f0328f6830347 100644
--- a/python/pyspark/sql/session.py
+++ b/python/pyspark/sql/session.py
@@ -232,6 +232,12 @@ class SparkSession(object):
         """Returns the underlying :class:`SparkContext`."""
         return self._sc
 
+    @property
+    @since(2.0)
+    def version(self):
+        """The version of Spark on which this application is running."""
+        return self._jsparkSession.version()
+
     @property
     @since(2.0)
     def conf(self):
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 1271d1c55bb38fa1e706330830664da3c9284024..1a40b7e2c13cbfa25ac70b5bd199f9beea26b3a6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -25,7 +25,7 @@ import scala.reflect.ClassTag
 import scala.reflect.runtime.universe.TypeTag
 import scala.util.control.NonFatal
 
-import org.apache.spark.{SparkConf, SparkContext}
+import org.apache.spark.{SPARK_VERSION, SparkConf, SparkContext}
 import org.apache.spark.annotation.{DeveloperApi, Experimental}
 import org.apache.spark.api.java.JavaRDD
 import org.apache.spark.internal.Logging
@@ -79,6 +79,13 @@ class SparkSession private(
 
   sparkContext.assertNotStopped()
 
+  /**
+   * The version of Spark on which this application is running.
+   *
+   * @since 2.0.0
+   */
+  def version: String = SPARK_VERSION
+
   /* ----------------------- *
    |  Session-related state  |
    * ----------------------- */
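
For context, a minimal PySpark usage sketch (not part of the patch) showing how the new accessor would be read from an application. The builder calls are standard `SparkSession` API and not added by this change; only the `version` property is new here.

```python
# Minimal sketch, assuming PySpark 2.0+ with this patch applied.
from pyspark.sql import SparkSession

spark = SparkSession.builder.appName("version-check").getOrCreate()

# `spark.version` is the property added above; it forwards to the JVM
# session, whose `version` method simply returns SPARK_VERSION.
print(spark.version)

spark.stop()
```

On the Scala side, `spark.version` reads the same `SPARK_VERSION` constant directly, so both APIs report the version of Spark the application is actually running against.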