diff options
author | Michael Armbrust <michael@databricks.com> | 2014-04-19 15:06:04 -0700 |
---|---|---|
committer | Matei Zaharia <matei@databricks.com> | 2014-04-19 15:06:04 -0700 |
commit | 5d0f58b2eb8e48a95c4ab34bc89f7251d093f301 (patch) | |
tree | 6778f75f60209a2cba38e5f6c1d8d9c0104dfa64 | |
parent | 28238c81d9d81dba5d880cbd7ee910ec326bdc79 (diff) | |
download | spark-5d0f58b2eb8e48a95c4ab34bc89f7251d093f301.tar.gz spark-5d0f58b2eb8e48a95c4ab34bc89f7251d093f301.tar.bz2 spark-5d0f58b2eb8e48a95c4ab34bc89f7251d093f301.zip |
Use scala deprecation instead of java.
This gets rid of a warning when compiling core (since we were depending on a deprecated interface with a non-deprecated function). I also tested with javac, and this does the right thing when compiling java code.
Author: Michael Armbrust <michael@databricks.com>
Closes #452 from marmbrus/scalaDeprecation and squashes the following commits:
f628b4d [Michael Armbrust] Use scala deprecation instead of java.
-rw-r--r-- | core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala | 2 |
1 file changed, 1 insertion, 1 deletion
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala index cf30523ab5..bda9272b43 100644 --- a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala +++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala @@ -114,7 +114,7 @@ class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWork * @deprecated As of Spark 1.0.0, defaultMinSplits is deprecated, use * {@link #defaultMinPartitions()} instead */ - @Deprecated + @deprecated("use defaultMinPartitions", "1.0.0") def defaultMinSplits: java.lang.Integer = sc.defaultMinSplits /** Default min number of partitions for Hadoop RDDs when not given by user */ |