diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 63478c88b057bdf0f1c968600903c500e8632a58..9f0f6074229ddcce8dcffeed5388a5763edb7092 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -183,6 +183,8 @@ class SparkContext(config: SparkConf) extends Logging {
   // log out Spark Version in Spark driver log
   logInfo(s"Running Spark version $SPARK_VERSION")
 
+  warnDeprecatedVersions()
+
   /* ------------------------------------------------------------------------------------- *
    | Private variables. These variables keep the internal state of the context, and are    |
    | not accessible by the outside world. They're mutable since we want to initialize all  |
@@ -346,6 +348,16 @@ class SparkContext(config: SparkConf) extends Logging {
     value
   }
 
+  private def warnDeprecatedVersions(): Unit = {
+    val javaVersion = System.getProperty("java.version").split("[+.\\-]+", 3)
+    if (javaVersion.length >= 2 && javaVersion(1).toInt == 7) {
+      logWarning("Support for Java 7 is deprecated as of Spark 2.0.0")
+    }
+    if (scala.util.Properties.releaseVersion.exists(_.startsWith("2.10"))) {
+      logWarning("Support for Scala 2.10 is deprecated as of Spark 2.1.0")
+    }
+  }
+
   /** Control our logLevel. This overrides any user-defined log settings.
    * @param logLevel The desired log level as a string.
    * Valid log levels include: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN
diff --git a/docs/building-spark.md b/docs/building-spark.md
index ebe46a42a15c602673c9641ca464ebae25defebc..2b404bd3e116c9483b4ee9caa672507ce51c85cc 100644
--- a/docs/building-spark.md
+++ b/docs/building-spark.md
@@ -13,6 +13,7 @@ redirect_from: "building-with-maven.html"
 
 The Maven-based build is the build of reference for Apache Spark.
 Building Spark using Maven requires Maven 3.3.9 or newer and Java 7+.
+Note that support for Java 7 is deprecated as of Spark 2.0.0 and may be removed in Spark 2.2.0.
 
 ### Setting up Maven's Memory Usage
 
@@ -79,6 +80,9 @@ Because HDFS is not protocol-compatible across versions, if you want to read fro
 </tbody>
 </table>
 
+Note that support for versions of Hadoop before 2.6 is deprecated as of Spark 2.1.0 and may be
+removed in Spark 2.2.0.
+
 You can enable the `yarn` profile and optionally set the
 `yarn.version` property if it is different from `hadoop.version`. Spark only supports
 YARN versions 2.2.0 and later.
@@ -129,6 +133,8 @@ To produce a Spark package compiled with Scala 2.10, use the `-Dscala-2.10` prop
 
     ./dev/change-scala-version.sh 2.10
     ./build/mvn -Pyarn -Phadoop-2.4 -Dscala-2.10 -DskipTests clean package
+
+Note that support for Scala 2.10 is deprecated as of Spark 2.1.0 and may be removed in Spark 2.2.0.
 
 ## Building submodules individually
 
diff --git a/docs/index.md b/docs/index.md
index a7a92f6c4f6d736ee7d974b0d85de3f772da127e..fe51439ae08d7999007f5c97765be5637b86652e 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -28,6 +28,10 @@ Spark runs on Java 7+, Python 2.6+/3.4+ and R 3.1+. For the Scala API, Spark {{s
 uses Scala {{site.SCALA_BINARY_VERSION}}. You will need to use a compatible Scala version
 ({{site.SCALA_BINARY_VERSION}}.x).
 
+Note that support for Java 7 and Python 2.6 is deprecated as of Spark 2.0.0, support for
+Scala 2.10 and versions of Hadoop before 2.6 is deprecated as of Spark 2.1.0, and all of
+these may be removed in Spark 2.2.0.
+
 # Running the Examples and Shell
 
 Spark comes with several sample programs. Scala, Java, Python and R examples are in the
diff --git a/docs/programming-guide.md b/docs/programming-guide.md
index 7516579ec6dbfefc156942a7bf489701be889851..b9a2110b602a0a7e03042099c5867bea00010657 100644
--- a/docs/programming-guide.md
+++ b/docs/programming-guide.md
@@ -59,6 +59,8 @@ Spark {{site.SPARK_VERSION}} works with Java 7 and higher. If you are using Java
 for concisely writing functions, otherwise you can use the classes in the
 [org.apache.spark.api.java.function](api/java/index.html?org/apache/spark/api/java/function/package-summary.html) package.
 
+Note that support for Java 7 is deprecated as of Spark 2.0.0 and may be removed in Spark 2.2.0.
+
 To write a Spark application in Java, you need to add a dependency on Spark. Spark is available through Maven Central at:
 
     groupId = org.apache.spark
@@ -87,6 +89,8 @@ import org.apache.spark.SparkConf
 Spark {{site.SPARK_VERSION}} works with Python 2.6+ or Python 3.4+. It can use the standard CPython interpreter,
 so C libraries like NumPy can be used. It also works with PyPy 2.3+.
 
+Note that support for Python 2.6 is deprecated as of Spark 2.0.0, and may be removed in Spark 2.2.0.
+
 To run Spark applications in Python, use the `bin/spark-submit` script located in the Spark directory.
 This script will load Spark's Java/Scala libraries and allow you to submit applications to a cluster.
 You can also use `bin/pyspark` to launch an interactive Python shell.
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index 1b2e199c395be889ec39c2b07f0dc692ee2e8eb7..2c2cf6a373bb765466b41b29b5a9069e5b28d447 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -22,6 +22,7 @@ import shutil
 import signal
 import sys
 import threading
+import warnings
 from threading import RLock
 from tempfile import NamedTemporaryFile
 
@@ -187,6 +188,9 @@ class SparkContext(object):
         self.pythonExec = os.environ.get("PYSPARK_PYTHON", 'python')
         self.pythonVer = "%d.%d" % sys.version_info[:2]
 
+        if sys.version_info < (2, 7):
+            warnings.warn("Support for Python 2.6 is deprecated as of Spark 2.0.0")
+
         # Broadcast's __reduce__ method stores Broadcast instances here.
         # This allows other code to determine which Broadcast instances have
         # been pickled, so it can determine which Java broadcast objects to
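
As a side note on the patch's version-detection logic, here is a minimal, self-contained Scala sketch of the two JVM-side checks added in warnDeprecatedVersions(). The object and helper names (DeprecatedVersionCheckSketch, isJava7, isScala210) are illustrative only and not part of the patch; the real code lives inside SparkContext and reports through logWarning rather than println.

    // Standalone sketch of the checks added in warnDeprecatedVersions().
    // Names here are illustrative; the patched code logs via logWarning.
    object DeprecatedVersionCheckSketch {

      // Mirrors the patch: split the "java.version" string on '.', '+', or '-'
      // into at most three parts, then test whether the second component is 7.
      // Java 7 reports versions like "1.7.0_80", so parts(1) is "7" there.
      def isJava7(javaVersion: String): Boolean = {
        val parts = javaVersion.split("[+.\\-]+", 3)
        parts.length >= 2 && parts(1).toInt == 7
      }

      // Mirrors the Scala 2.10 check: releaseVersion is an Option[String]
      // (e.g. Some("2.11.8")), so exists(_.startsWith("2.10")) suffices.
      def isScala210: Boolean =
        scala.util.Properties.releaseVersion.exists(_.startsWith("2.10"))

      def main(args: Array[String]): Unit = {
        Seq("1.7.0_80", "1.8.0_131", "9.0.4").foreach { v =>
          println(s"java.version=$v -> Java 7? ${isJava7(v)}")
        }
        println(s"Scala 2.10 runtime? $isScala210")
      }
    }

Note the length guard in isJava7: on a single-component version string such as "9", the split yields one element and the check short-circuits before indexing parts(1). The Python-side check in pyspark/context.py needs no such parsing, since sys.version_info already compares as a tuple against (2, 7).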