about | summary | refs | log | tree | commit | diff
path: root/graphx
diff options
context:
space:
mode:
Diffstat (limited to 'graphx')
-rw-r--r--  graphx/src/main/scala/org/apache/spark/graphx/lib/Analytics.scala | 18
1 file changed, 11 insertions(+), 7 deletions(-)
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/lib/Analytics.scala b/graphx/src/main/scala/org/apache/spark/graphx/lib/Analytics.scala
index fa533a512d..d901d4fe22 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/lib/Analytics.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/lib/Analytics.scala
@@ -27,10 +27,14 @@ import org.apache.spark.graphx.PartitionStrategy._
object Analytics extends Logging {
def main(args: Array[String]): Unit = {
- val host = args(0)
- val taskType = args(1)
- val fname = args(2)
- val options = args.drop(3).map { arg =>
+ if (args.length < 2) {
+ System.err.println("Usage: Analytics <taskType> <file> [other options]")
+ System.exit(1)
+ }
+
+ val taskType = args(0)
+ val fname = args(1)
+ val options = args.drop(2).map { arg =>
arg.dropWhile(_ == '-').split('=') match {
case Array(opt, v) => (opt -> v)
case _ => throw new IllegalArgumentException("Invalid argument: " + arg)
@@ -71,7 +75,7 @@ object Analytics extends Logging {
println("| PageRank |")
println("======================================")
- val sc = new SparkContext(host, "PageRank(" + fname + ")", conf)
+ val sc = new SparkContext(conf.setAppName("PageRank(" + fname + ")"))
val unpartitionedGraph = GraphLoader.edgeListFile(sc, fname,
minEdgePartitions = numEPart).cache()
@@ -115,7 +119,7 @@ object Analytics extends Logging {
println("| Connected Components |")
println("======================================")
- val sc = new SparkContext(host, "ConnectedComponents(" + fname + ")", conf)
+ val sc = new SparkContext(conf.setAppName("ConnectedComponents(" + fname + ")"))
val unpartitionedGraph = GraphLoader.edgeListFile(sc, fname,
minEdgePartitions = numEPart).cache()
val graph = partitionStrategy.foldLeft(unpartitionedGraph)(_.partitionBy(_))
@@ -137,7 +141,7 @@ object Analytics extends Logging {
println("======================================")
println("| Triangle Count |")
println("======================================")
- val sc = new SparkContext(host, "TriangleCount(" + fname + ")", conf)
+ val sc = new SparkContext(conf.setAppName("TriangleCount(" + fname + ")"))
val graph = GraphLoader.edgeListFile(sc, fname, canonicalOrientation = true,
minEdgePartitions = numEPart).partitionBy(partitionStrategy).cache()
val triangles = TriangleCount.run(graph)