author     GuoQiang Li <witgo@qq.com>          2014-09-06 15:08:43 -0700
committer  Josh Rosen <joshrosen@apache.org>   2014-09-06 15:08:43 -0700
commit     21a1e1bb893512b2f68598ab0c0ec8c33e8d9909
tree       3f54ef947532e66a8d1c525e9cc40522a5aa399a
parent     607ae39c22947dad8e65cbcec310367925f62eba
[SPARK-3273][SPARK-3301] We should read the version information from the same place

Author: GuoQiang Li <witgo@qq.com>

Closes #2175 from witgo/SPARK-3273 and squashes the following commits:

cf9c65a [GuoQiang Li] We should read the version information from the same place
2a44e2f [GuoQiang Li] The spark version in the welcome message of pyspark is not correct
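
The pattern this patch applies, in a minimal sketch (VersionBanner and its contents are illustrative, not part of the patch): the version string is declared exactly once, in the org.apache.spark package object, and every consumer imports that constant instead of carrying its own copy of the literal.

// package.scala: the single definition site of the version string
package org.apache

package object spark {
  val SPARK_VERSION = "1.2.0-SNAPSHOT"
}

// Elsewhere (hypothetical consumer): import the shared constant
// rather than hard-coding "1.2.0-SNAPSHOT" a second time.
package org.apache.spark.example

import org.apache.spark.SPARK_VERSION

object VersionBanner {
  def render(): String = "Welcome to Spark version %s".format(SPARK_VERSION)
}
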
 core/src/main/scala/org/apache/spark/SparkContext.scala                        | 5 ++---
 core/src/main/scala/org/apache/spark/package.scala                             | 1 +
 core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala      | 3 ++-
 core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala | 5 +++--
 python/pyspark/shell.py                                                        | 4 ++--
 repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala                 | 6 ++++--
 6 files changed, 14 insertions(+), 10 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 6eaf679476..24d1a8f9ec 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -49,6 +49,7 @@ import org.apache.spark.scheduler.cluster.{CoarseGrainedSchedulerBackend, SparkD
import org.apache.spark.scheduler.cluster.mesos.{CoarseMesosSchedulerBackend, MesosSchedulerBackend}
import org.apache.spark.scheduler.local.LocalBackend
import org.apache.spark.storage._
+import org.apache.spark.SPARK_VERSION
import org.apache.spark.ui.SparkUI
import org.apache.spark.util.{CallSite, ClosureCleaner, MetadataCleaner, MetadataCleanerType, TimeStampedWeakValueHashMap, Utils}
@@ -825,7 +826,7 @@ class SparkContext(config: SparkConf) extends Logging {
}
/** The version of Spark on which this application is running. */
- def version = SparkContext.SPARK_VERSION
+ def version = SPARK_VERSION
/**
* Return a map from the slave to the max memory available for caching and the remaining
@@ -1297,8 +1298,6 @@ class SparkContext(config: SparkConf) extends Logging {
*/
object SparkContext extends Logging {
- private[spark] val SPARK_VERSION = "1.2.0-SNAPSHOT"
-
private[spark] val SPARK_JOB_DESCRIPTION = "spark.job.description"
private[spark] val SPARK_JOB_GROUP_ID = "spark.jobGroup.id"
diff --git a/core/src/main/scala/org/apache/spark/package.scala b/core/src/main/scala/org/apache/spark/package.scala
index 5cdbc306e5..e2fc9c6499 100644
--- a/core/src/main/scala/org/apache/spark/package.scala
+++ b/core/src/main/scala/org/apache/spark/package.scala
@@ -44,4 +44,5 @@ package org.apache
package object spark {
// For package docs only
+ val SPARK_VERSION = "1.2.0-SNAPSHOT"
}
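
Why a package object works here: members of package object spark are in scope for code compiled directly in package org.apache.spark, and reachable from anywhere else as org.apache.spark.SPARK_VERSION, which is why the other files in this patch add an explicit import. A small sketch of both access paths (both Demo objects are hypothetical):

// Declared directly in org.apache.spark: the package object is in scope,
// so no import is needed.
package org.apache.spark

object InsideDemo {
  def report(): String = "version " + SPARK_VERSION
}

// Any other package imports the member explicitly, as the hunks in this
// patch do.
package com.example

import org.apache.spark.SPARK_VERSION

object OutsideDemo {
  def report(): String = "version " + SPARK_VERSION
}
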
diff --git a/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala b/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala
index 4b99f63044..64b32ae0ed 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala
@@ -29,6 +29,7 @@ import org.json4s.jackson.JsonMethods._
import org.apache.spark.{Logging, SparkConf, SparkContext}
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.io.CompressionCodec
+import org.apache.spark.SPARK_VERSION
import org.apache.spark.util.{FileLogger, JsonProtocol, Utils}
/**
@@ -86,7 +87,7 @@ private[spark] class EventLoggingListener(
sparkConf.get("spark.io.compression.codec", CompressionCodec.DEFAULT_COMPRESSION_CODEC)
logger.newFile(COMPRESSION_CODEC_PREFIX + codec)
}
- logger.newFile(SPARK_VERSION_PREFIX + SparkContext.SPARK_VERSION)
+ logger.newFile(SPARK_VERSION_PREFIX + SPARK_VERSION)
logger.newFile(LOG_PREFIX + logger.fileIndex)
}
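
For context: EventLoggingListener records the version by creating an empty marker file, named with a version prefix, in the event log directory; the suite below parses that name back. A sketch of the round trip, assuming the prefix value is "SPARK_VERSION_" (the actual constant lives in EventLoggingListener and is not shown in this diff):

val SPARK_VERSION_PREFIX = "SPARK_VERSION_"  // assumed value

// Name of the marker file written alongside the event logs.
def versionFileName(version: String): String =
  SPARK_VERSION_PREFIX + version

// Inverse operation, as exercised by the assertions in the suite below.
def parseSparkVersion(fileName: String): String =
  fileName.stripPrefix(SPARK_VERSION_PREFIX)

// versionFileName("1.2.0-SNAPSHOT")              == "SPARK_VERSION_1.2.0-SNAPSHOT"
// parseSparkVersion("SPARK_VERSION_1.2.0-SNAPSHOT") == "1.2.0-SNAPSHOT"
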
diff --git a/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala
index fead883793..e5315bc93e 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala
@@ -28,6 +28,7 @@ import org.scalatest.{BeforeAndAfter, FunSuite}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.io.CompressionCodec
+import org.apache.spark.SPARK_VERSION
import org.apache.spark.util.{JsonProtocol, Utils}
import java.io.File
@@ -196,7 +197,7 @@ class EventLoggingListenerSuite extends FunSuite with BeforeAndAfter {
def assertInfoCorrect(info: EventLoggingInfo, loggerStopped: Boolean) {
assert(info.logPaths.size > 0)
- assert(info.sparkVersion === SparkContext.SPARK_VERSION)
+ assert(info.sparkVersion === SPARK_VERSION)
assert(info.compressionCodec.isDefined === compressionCodec.isDefined)
info.compressionCodec.foreach { codec =>
assert(compressionCodec.isDefined)
@@ -381,7 +382,7 @@ class EventLoggingListenerSuite extends FunSuite with BeforeAndAfter {
private def assertSparkVersionIsValid(logFiles: Array[FileStatus]) {
val file = logFiles.map(_.getPath.getName).find(EventLoggingListener.isSparkVersionFile)
assert(file.isDefined)
- assert(EventLoggingListener.parseSparkVersion(file.get) === SparkContext.SPARK_VERSION)
+ assert(EventLoggingListener.parseSparkVersion(file.get) === SPARK_VERSION)
}
private def assertCompressionCodecIsValid(logFiles: Array[FileStatus], compressionCodec: String) {
diff --git a/python/pyspark/shell.py b/python/pyspark/shell.py
index fde3c29e5e..89cf76920e 100644
--- a/python/pyspark/shell.py
+++ b/python/pyspark/shell.py
@@ -49,9 +49,9 @@ print("""Welcome to
____ __
/ __/__ ___ _____/ /__
_\ \/ _ \/ _ `/ __/ '_/
- /__ / .__/\_,_/_/ /_/\_\ version 1.0.0-SNAPSHOT
+ /__ / .__/\_,_/_/ /_/\_\ version %s
/_/
-""")
+""" % sc.version)
print("Using Python version %s (%s, %s)" % (
platform.python_version(),
platform.python_build()[0],
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
index 910b31d209..7667a9c119 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
@@ -14,6 +14,8 @@ import scala.reflect.internal.util.Position
import scala.util.control.Exception.ignoring
import scala.tools.nsc.util.stackTraceString
+import org.apache.spark.SPARK_VERSION
+
/**
* Machinery for the asynchronous initialization of the repl.
*/
@@ -26,9 +28,9 @@ trait SparkILoopInit {
____ __
/ __/__ ___ _____/ /__
_\ \/ _ \/ _ `/ __/ '_/
- /___/ .__/\_,_/_/ /_/\_\ version 1.0.0-SNAPSHOT
+ /___/ .__/\_,_/_/ /_/\_\ version %s
/_/
-""")
+""".format(SPARK_VERSION))
import Properties._
val welcomeMsg = "Using Scala %s (%s, Java %s)".format(
versionString, javaVmName, javaVersion)
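
Net effect: both shells now format their banner from the same constant that SparkContext.version returns, so a quick consistency check in spark-shell looks like this (a hypothetical verification, not part of the patch):

// In spark-shell, after this change:
import org.apache.spark.SPARK_VERSION

assert(sc.version == SPARK_VERSION)  // both read the one definition in package.scala
println(s"Spark ${sc.version}")      // prints: Spark 1.2.0-SNAPSHOT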