author     Patrick Wendell <pwendell@gmail.com>    2014-04-22 00:42:16 -0700
committer  Patrick Wendell <pwendell@gmail.com>    2014-04-22 00:42:16 -0700
commit     83084d3b7bab09524dd85054c442abaf97d22fea (patch)
tree       a75e777fa1b6e8e03660b01b641ee1f9a2187640 /core
parent     ac164b79d12f5a269d9c05d04dced51a415e3b0e (diff)
SPARK-1496: Have jarOfClass return Option[String]
A simple change, mostly had to change a bunch of example code.

Author: Patrick Wendell <pwendell@gmail.com>

Closes #438 from pwendell/jar-of-class and squashes the following commits:

aa010ff [Patrick Wendell] SPARK-1496: Have jarOfClass return Option[String]
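Because the return type moves from Seq[String] to Option[String], callers that used to pass the sequence straight through need a small adjustment. The sketch below is illustrative only and not part of this patch: MyApp, its app name, and the addJar-based registration pattern are assumptions about a typical driver; callers could equally convert the result with .toSeq.

// Hypothetical driver showing the new Option[String] signature; not from this patch.
import org.apache.spark.{SparkConf, SparkContext}

object MyApp {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("MyApp")
    val sc = new SparkContext(conf)

    // jarOfClass now returns Some(path) when the class was loaded from a JAR,
    // and None otherwise (e.g. when running from class files in an IDE).
    SparkContext.jarOfClass(this.getClass) match {
      case Some(jarPath) => sc.addJar(jarPath)  // ship the application JAR to executors
      case None          => ()                  // not running from a JAR; nothing to ship
    }

    sc.stop()
  }
}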
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkContext.scala  10
1 file changed, 5 insertions, 5 deletions
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index aa49f32ecd..25ca650a3a 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1346,19 +1346,19 @@ object SparkContext extends Logging {
* Find the JAR from which a given class was loaded, to make it easy for users to pass
* their JARs to SparkContext.
*/
- def jarOfClass(cls: Class[_]): Seq[String] = {
+ def jarOfClass(cls: Class[_]): Option[String] = {
val uri = cls.getResource("/" + cls.getName.replace('.', '/') + ".class")
if (uri != null) {
val uriStr = uri.toString
if (uriStr.startsWith("jar:file:")) {
// URI will be of the form "jar:file:/path/foo.jar!/package/cls.class",
// so pull out the /path/foo.jar
- List(uriStr.substring("jar:file:".length, uriStr.indexOf('!')))
+ Some(uriStr.substring("jar:file:".length, uriStr.indexOf('!')))
} else {
- Nil
+ None
}
} else {
- Nil
+ None
}
}
@@ -1367,7 +1367,7 @@ object SparkContext extends Logging {
* to pass their JARs to SparkContext. In most cases you can call jarOfObject(this) in
* your driver program.
*/
- def jarOfObject(obj: AnyRef): Seq[String] = jarOfClass(obj.getClass)
+ def jarOfObject(obj: AnyRef): Option[String] = jarOfClass(obj.getClass)
/**
* Creates a modified version of a SparkConf with the parameters that can be passed separately