path: root/core/src
author    Sean Owen <sowen@cloudera.com>    2016-11-03 17:27:23 -0700
committer Reynold Xin <rxin@databricks.com>    2016-11-03 17:27:23 -0700
commit    dc4c60098641cf64007e2f0e36378f000ad5f6b1 (patch)
tree      fad72496e3f06613484fdac6c8c13353c79eb838 /core/src
parent    f22954ad49bf5a32c7b6d8487cd38ffe0da904ca (diff)
[SPARK-18138][DOCS] Document that Java 7, Python 2.6, Scala 2.10, Hadoop < 2.6 are deprecated in Spark 2.1.0
## What changes were proposed in this pull request?

Document that Java 7, Python 2.6, Scala 2.10, and Hadoop < 2.6 are deprecated in Spark 2.1.0. This does not actually implement any of the changes in SPARK-18138; it just peppers the documentation with notices about them.

## How was this patch tested?

Doc build

Author: Sean Owen <sowen@cloudera.com>

Closes #15733 from srowen/SPARK-18138.
Diffstat (limited to 'core/src')
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkContext.scala  12
1 file changed, 12 insertions, 0 deletions
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 63478c88b0..9f0f607422 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -183,6 +183,8 @@ class SparkContext(config: SparkConf) extends Logging {
// log out Spark Version in Spark driver log
logInfo(s"Running Spark version $SPARK_VERSION")
+ warnDeprecatedVersions()
+
/* ------------------------------------------------------------------------------------- *
| Private variables. These variables keep the internal state of the context, and are |
| not accessible by the outside world. They're mutable since we want to initialize all |
@@ -346,6 +348,16 @@ class SparkContext(config: SparkConf) extends Logging {
value
}
+ private def warnDeprecatedVersions(): Unit = {
+ val javaVersion = System.getProperty("java.version").split("[+.\\-]+", 3)
+ if (javaVersion.length >= 2 && javaVersion(1).toInt == 7) {
+ logWarning("Support for Java 7 is deprecated as of Spark 2.0.0")
+ }
+ if (scala.util.Properties.releaseVersion.exists(_.startsWith("2.10"))) {
+ logWarning("Support for Scala 2.10 is deprecated as of Spark 2.1.0")
+ }
+ }
+
/** Control our logLevel. This overrides any user-defined log settings.
* @param logLevel The desired log level as a string.
* Valid log levels include: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN
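For context, the following is a minimal standalone sketch (not part of this commit; the object name is hypothetical) showing how the added deprecation check behaves on its own. It assumes a pre-Java-9 JVM where java.version has the form "1.7.0_80", which is what the split pattern in the patch targets.

// Standalone sketch (hypothetical object name, not part of the commit) of the
// deprecation check added above. Assumes a pre-Java-9 java.version such as
// "1.7.0_80", which splits on '.', '+' or '-' into Array("1", "7", "0_80").
object DeprecationCheckSketch {
  def main(args: Array[String]): Unit = {
    val javaVersion = System.getProperty("java.version").split("[+.\\-]+", 3)
    if (javaVersion.length >= 2 && javaVersion(1).toInt == 7) {
      println("Support for Java 7 is deprecated as of Spark 2.0.0")
    }
    // releaseVersion is the Scala runtime version, e.g. Some("2.10.6")
    if (scala.util.Properties.releaseVersion.exists(_.startsWith("2.10"))) {
      println("Support for Scala 2.10 is deprecated as of Spark 2.1.0")
    }
  }
}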