path: root/graphx
author    Prashant Sharma <prashant.s@imaginea.com>  2014-05-08 10:23:05 -0700
committer Patrick Wendell <pwendell@gmail.com>  2014-05-08 10:23:05 -0700
commit    44dd57fb66bb676d753ad8d9757f9f4c03364113 (patch)
tree      755cdff1c17a29b24837a6405fed5eb46733769e /graphx
parent    19c8fb02bc2c2f76c3c45bfff4b8d093be9d7c66 (diff)
SPARK-1565, update examples to be used with spark-submit script.
Commit for initial feedback. Basically, I am curious whether we should prompt the user for args, especially when they are mandatory, and whether we can skip them when they are not. A few other things also did not work, for example `bin/spark-submit examples/target/scala-2.10/spark-examples-1.0.0-SNAPSHOT-hadoop1.0.4.jar --class org.apache.spark.examples.SparkALS --arg 100 500 10 5 2`: not all the args get passed through properly. I may have messed something up and will try to sort it out.

Author: Prashant Sharma <prashant.s@imaginea.com>

Closes #552 from ScrapCodes/SPARK-1565/update-examples and squashes the following commits:

669dd23 [Prashant Sharma] Review comments
2727e70 [Prashant Sharma] SPARK-1565, update examples to be used with spark-submit script.
Diffstat (limited to 'graphx')
-rw-r--r--  graphx/src/main/scala/org/apache/spark/graphx/lib/Analytics.scala | 18
1 file changed, 11 insertions(+), 7 deletions(-)
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/lib/Analytics.scala b/graphx/src/main/scala/org/apache/spark/graphx/lib/Analytics.scala
index fa533a512d..d901d4fe22 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/lib/Analytics.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/lib/Analytics.scala
@@ -27,10 +27,14 @@ import org.apache.spark.graphx.PartitionStrategy._
 object Analytics extends Logging {
 
   def main(args: Array[String]): Unit = {
-    val host = args(0)
-    val taskType = args(1)
-    val fname = args(2)
-    val options = args.drop(3).map { arg =>
+    if (args.length < 2) {
+      System.err.println("Usage: Analytics <taskType> <file> [other options]")
+      System.exit(1)
+    }
+
+    val taskType = args(0)
+    val fname = args(1)
+    val options = args.drop(2).map { arg =>
       arg.dropWhile(_ == '-').split('=') match {
         case Array(opt, v) => (opt -> v)
         case _ => throw new IllegalArgumentException("Invalid argument: " + arg)
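
For context, a minimal standalone sketch of the option parsing in the hunk above; the sample flags (`--numEPart=4`, `--partStrategy=EdgePartition2D`) are illustrative, not part of the patch:

```scala
// Demo of the parsing logic above: strip leading dashes, split on '='.
object OptionParseDemo {
  def main(args: Array[String]): Unit = {
    val sample = Array("--numEPart=4", "--partStrategy=EdgePartition2D")
    val options = sample.map { arg =>
      arg.dropWhile(_ == '-').split('=') match {
        case Array(opt, v) => (opt -> v)
        case _ => throw new IllegalArgumentException("Invalid argument: " + arg)
      }
    }
    options.foreach { case (k, v) => println(k + " -> " + v) }
    // prints:
    // numEPart -> 4
    // partStrategy -> EdgePartition2D
  }
}
```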
@@ -71,7 +75,7 @@ object Analytics extends Logging {
         println("| PageRank |")
         println("======================================")
 
-        val sc = new SparkContext(host, "PageRank(" + fname + ")", conf)
+        val sc = new SparkContext(conf.setAppName("PageRank(" + fname + ")"))
 
         val unpartitionedGraph = GraphLoader.edgeListFile(sc, fname,
           minEdgePartitions = numEPart).cache()
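
The change above moves the app name into `SparkConf` and drops the hard-coded `host` argument, so spark-submit can supply the master instead of the command line. A minimal sketch of the new construction pattern, with a local master assumed purely for illustration:

```scala
import org.apache.spark.{SparkConf, SparkContext}

object ConfDemo {
  def main(args: Array[String]): Unit = {
    // spark-submit normally injects the master; set it here only for a local run.
    val conf = new SparkConf().setMaster("local[2]").setAppName("PageRank(demo)")
    val sc = new SparkContext(conf)
    println(sc.appName)  // PageRank(demo)
    sc.stop()
  }
}
```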
@@ -115,7 +119,7 @@ object Analytics extends Logging {
         println("| Connected Components |")
         println("======================================")
 
-        val sc = new SparkContext(host, "ConnectedComponents(" + fname + ")", conf)
+        val sc = new SparkContext(conf.setAppName("ConnectedComponents(" + fname + ")"))
         val unpartitionedGraph = GraphLoader.edgeListFile(sc, fname,
           minEdgePartitions = numEPart).cache()
         val graph = partitionStrategy.foldLeft(unpartitionedGraph)(_.partitionBy(_))
@@ -137,7 +141,7 @@ object Analytics extends Logging {
         println("======================================")
         println("| Triangle Count |")
         println("======================================")
-        val sc = new SparkContext(host, "TriangleCount(" + fname + ")", conf)
+        val sc = new SparkContext(conf.setAppName("TriangleCount(" + fname + ")"))
         val graph = GraphLoader.edgeListFile(sc, fname, canonicalOrientation = true,
           minEdgePartitions = numEPart).partitionBy(partitionStrategy).cache()
         val triangles = TriangleCount.run(graph)
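
Taken together, a hedged end-to-end sketch of the triangle-count path after this patch; `edges.txt`, the local master, and the partition strategy are placeholders, not values from the commit:

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.graphx.{GraphLoader, PartitionStrategy}
import org.apache.spark.graphx.lib.TriangleCount

object TriangleDemo {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[2]").setAppName("TriangleCount(edges.txt)")
    val sc = new SparkContext(conf)
    // canonicalOrientation = true is required by TriangleCount (srcId < dstId).
    val graph = GraphLoader.edgeListFile(sc, "edges.txt", canonicalOrientation = true)
      .partitionBy(PartitionStrategy.RandomVertexCut).cache()
    val triangles = TriangleCount.run(graph)
    // Each triangle is counted once at each of its three vertices, hence / 3.
    println("Triangles: " +
      triangles.vertices.map { case (_, count) => count.toLong }.reduce(_ + _) / 3)
    sc.stop()
  }
}
```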