path: root/graphx
author    Ankur Dave <ankurdave@gmail.com>  2014-01-10 12:36:15 -0800
committer Ankur Dave <ankurdave@gmail.com>  2014-01-10 12:36:15 -0800
commit    eee9bc0958cd3b5efa312d4690f93ff259ca4b39 (patch)
tree      872beee391ba3416289a534e108fb930b628bbf2 /graphx
parent    c39ec3017f0c8dcf5546ab8b9153bfe237b2aa68 (diff)
download  spark-eee9bc0958cd3b5efa312d4690f93ff259ca4b39.tar.gz
          spark-eee9bc0958cd3b5efa312d4690f93ff259ca4b39.tar.bz2
          spark-eee9bc0958cd3b5efa312d4690f93ff259ca4b39.zip
Remove commented-out perf files
Diffstat (limited to 'graphx')
-rw-r--r--  graphx/src/main/scala/org/apache/spark/graphx/perf/BagelTest.scala  76
-rw-r--r--  graphx/src/main/scala/org/apache/spark/graphx/perf/SparkTest.scala  75
2 files changed, 0 insertions, 151 deletions
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/perf/BagelTest.scala b/graphx/src/main/scala/org/apache/spark/graphx/perf/BagelTest.scala
deleted file mode 100644
index 81332e0800..0000000000
--- a/graphx/src/main/scala/org/apache/spark/graphx/perf/BagelTest.scala
+++ /dev/null
@@ -1,76 +0,0 @@
-///// This file creates circular dependencies between examples bagel and graph
-
-// package org.apache.spark.graphx.perf
-
-// import org.apache.spark._
-// import org.apache.spark.SparkContext._
-// import org.apache.spark.bagel.Bagel
-
-// import org.apache.spark.examples.bagel
-// //import org.apache.spark.bagel.examples._
-// import org.apache.spark.graphx._
-
-
-// object BagelTest {
-
-// def main(args: Array[String]) {
-// val host = args(0)
-// val taskType = args(1)
-// val fname = args(2)
-// val options = args.drop(3).map { arg =>
-// arg.dropWhile(_ == '-').split('=') match {
-// case Array(opt, v) => (opt -> v)
-// case _ => throw new IllegalArgumentException("Invalid argument: " + arg)
-// }
-// }
-
-// System.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
-// //System.setProperty("spark.shuffle.compress", "false")
-// System.setProperty("spark.kryo.registrator", "org.apache.spark.bagel.examples.PRKryoRegistrator")
-
-// var numIter = Int.MaxValue
-// var isDynamic = false
-// var tol:Float = 0.001F
-// var outFname = ""
-// var numVPart = 4
-// var numEPart = 4
-
-// options.foreach{
-// case ("numIter", v) => numIter = v.toInt
-// case ("dynamic", v) => isDynamic = v.toBoolean
-// case ("tol", v) => tol = v.toFloat
-// case ("output", v) => outFname = v
-// case ("numVPart", v) => numVPart = v.toInt
-// case ("numEPart", v) => numEPart = v.toInt
-// case (opt, _) => throw new IllegalArgumentException("Invalid option: " + opt)
-// }
-
-// val sc = new SparkContext(host, "PageRank(" + fname + ")")
-// val g = GraphLoader.textFile(sc, fname, a => 1.0F).withPartitioner(numVPart, numEPart).cache()
-// val startTime = System.currentTimeMillis
-
-// val numVertices = g.vertices.count()
-
-// val vertices = g.collectNeighborIds(EdgeDirection.Out).map { case (vid, neighbors) =>
-// (vid.toString, new PRVertex(1.0, neighbors.map(_.toString)))
-// }
-
-// // Do the computation
-// val epsilon = 0.01 / numVertices
-// val messages = sc.parallelize(Array[(String, PRMessage)]())
-// val utils = new PageRankUtils
-// val result =
-// Bagel.run(
-// sc, vertices, messages, combiner = new PRCombiner(),
-// numPartitions = numVPart)(
-// utils.computeWithCombiner(numVertices, epsilon, numIter))
-
-// println("Total rank: " + result.map{ case (id, r) => r.value }.reduce(_+_) )
-// if (!outFname.isEmpty) {
-// println("Saving pageranks of pages to " + outFname)
-// result.map{ case (id, r) => id + "\t" + r.value }.saveAsTextFile(outFname)
-// }
-// println("Runtime: " + ((System.currentTimeMillis - startTime)/1000.0) + " seconds")
-// sc.stop()
-// }
-// }
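Both deleted files parse their trailing command-line arguments with the same "key=value" idiom, stripping leading dashes and splitting on '='. A minimal, self-contained sketch of that pattern, extracted from the deleted code (the OptionParsing object name and the sample options are illustrative, not from the original files):

// Parse trailing "--key=value" arguments into (key, value) pairs,
// mirroring the idiom used by the deleted perf files.
object OptionParsing {
  def parse(args: Array[String]): Seq[(String, String)] =
    args.toSeq.map { arg =>
      arg.dropWhile(_ == '-').split('=') match {
        case Array(opt, v) => opt -> v
        case _ => throw new IllegalArgumentException("Invalid argument: " + arg)
      }
    }

  def main(args: Array[String]): Unit = {
    // Example invocation: --numIter=10 --tol=0.001
    parse(Array("--numIter=10", "--tol=0.001")).foreach {
      case ("numIter", v) => println("numIter = " + v.toInt)
      case ("tol", v)     => println("tol = " + v.toFloat)
      case (opt, _)       => throw new IllegalArgumentException("Invalid option: " + opt)
    }
  }
}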
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/perf/SparkTest.scala b/graphx/src/main/scala/org/apache/spark/graphx/perf/SparkTest.scala
deleted file mode 100644
index 24262640ab..0000000000
--- a/graphx/src/main/scala/org/apache/spark/graphx/perf/SparkTest.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-///// This file creates circular dependencies between examples bagel and graph
-
-
-// package org.apache.spark.graphx.perf
-
-// import org.apache.spark._
-// import org.apache.spark.SparkContext._
-// import org.apache.spark.bagel.Bagel
-// import org.apache.spark.bagel.examples._
-// import org.apache.spark.graphx._
-
-
-// object SparkTest {
-
-// def main(args: Array[String]) {
-// val host = args(0)
-// val taskType = args(1)
-// val fname = args(2)
-// val options = args.drop(3).map { arg =>
-// arg.dropWhile(_ == '-').split('=') match {
-// case Array(opt, v) => (opt -> v)
-// case _ => throw new IllegalArgumentException("Invalid argument: " + arg)
-// }
-// }
-
-// System.setProperty("spark.serializer", "org.apache.spark.KryoSerializer")
-// //System.setProperty("spark.shuffle.compress", "false")
-// System.setProperty("spark.kryo.registrator", "spark.bagel.examples.PRKryoRegistrator")
-
-// var numIter = Int.MaxValue
-// var isDynamic = false
-// var tol:Float = 0.001F
-// var outFname = ""
-// var numVPart = 4
-// var numEPart = 4
-
-// options.foreach{
-// case ("numIter", v) => numIter = v.toInt
-// case ("dynamic", v) => isDynamic = v.toBoolean
-// case ("tol", v) => tol = v.toFloat
-// case ("output", v) => outFname = v
-// case ("numVPart", v) => numVPart = v.toInt
-// case ("numEPart", v) => numEPart = v.toInt
-// case (opt, _) => throw new IllegalArgumentException("Invalid option: " + opt)
-// }
-
-// val sc = new SparkContext(host, "PageRank(" + fname + ")")
-// val g = GraphLoader.textFile(sc, fname, a => 1.0F).withPartitioner(numVPart, numEPart).cache()
-// val startTime = System.currentTimeMillis
-
-// val numVertices = g.vertices.count()
-
-// val vertices = g.collectNeighborIds(EdgeDirection.Out).map { case (vid, neighbors) =>
-// (vid.toString, new PRVertex(1.0, neighbors.map(_.toString)))
-// }
-
-// // Do the computation
-// val epsilon = 0.01 / numVertices
-// val messages = sc.parallelize(Array[(String, PRMessage)]())
-// val utils = new PageRankUtils
-// val result =
-// Bagel.run(
-// sc, vertices, messages, combiner = new PRCombiner(),
-// numPartitions = numVPart)(
-// utils.computeWithCombiner(numVertices, epsilon, numIter))
-
-// println("Total rank: " + result.map{ case (id, r) => r.value }.reduce(_+_) )
-// if (!outFname.isEmpty) {
-// println("Saving pageranks of pages to " + outFname)
-// result.map{ case (id, r) => id + "\t" + r.value }.saveAsTextFile(outFname)
-// }
-// println("Runtime: " + ((System.currentTimeMillis - startTime)/1000.0) + " seconds")
-// sc.stop()
-// }
-// }
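Both removed files benchmarked PageRank by routing GraphX data through Bagel, which is what created the circular dependency noted in their header comments; GraphX ships its own PageRank implementation, which is presumably why these stubs could be dropped. A minimal sketch of the equivalent computation on the GraphX API itself, assuming GraphLoader.edgeListFile and Graph.pageRank (the input path and app name are placeholders):

// Run PageRank with GraphX's built-in implementation instead of Bagel.
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.graphx.GraphLoader

object GraphXPageRank {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("PageRank")
    val sc = new SparkContext(conf)

    // "data/edges.txt" is a placeholder edge-list path (one "srcId dstId" pair per line).
    val graph = GraphLoader.edgeListFile(sc, "data/edges.txt").cache()

    // Iterate until the ranks change by less than the given tolerance,
    // analogous to the deleted files' tol/numIter options.
    val ranks = graph.pageRank(tol = 0.001).vertices

    println("Total rank: " + ranks.map(_._2).sum())
    sc.stop()
  }
}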