aboutsummaryrefslogtreecommitdiff
path: root/examples
diff options
context:
space:
mode:
authorArtjom-Metro <Artjom-Metro@users.noreply.github.com>2014-07-10 16:03:30 -0700
committerReynold Xin <rxin@apache.org>2014-07-10 16:03:30 -0700
commitae8ca4dfbacd5a5197fb41722607ad99c190f768 (patch)
treed33523d84ad1f26e663f944ac95f57e90d64204f /examples
parent2dd67248503306bb08946b1796821e9f9ed4d00e (diff)
downloadspark-ae8ca4dfbacd5a5197fb41722607ad99c190f768.tar.gz
spark-ae8ca4dfbacd5a5197fb41722607ad99c190f768.tar.bz2
spark-ae8ca4dfbacd5a5197fb41722607ad99c190f768.zip
SPARK-2427: Fix Scala examples that use the wrong command line arguments index
The Scala examples HBaseTest and HdfsTest don't use the correct indexes for the command line arguments. This is due to the fix of JIRA 1565, where these examples were not correctly adapted to the new usage of the submit script. Author: Artjom-Metro <Artjom-Metro@users.noreply.github.com> Author: Artjom-Metro <artjom31415@googlemail.com> Closes #1353 from Artjom-Metro/fix_examples and squashes the following commits: 6111801 [Artjom-Metro] Reduce the default number of iterations cfaa73c [Artjom-Metro] Fix some examples that use the wrong index to access the command line arguments
Diffstat (limited to 'examples')
-rw-r--r--examples/src/main/scala/org/apache/spark/examples/HBaseTest.scala6
-rw-r--r--examples/src/main/scala/org/apache/spark/examples/HdfsTest.scala10
-rw-r--r--examples/src/main/scala/org/apache/spark/examples/SparkPageRank.scala6
3 files changed, 16 insertions, 6 deletions
diff --git a/examples/src/main/scala/org/apache/spark/examples/HBaseTest.scala b/examples/src/main/scala/org/apache/spark/examples/HBaseTest.scala
index 4893b017ed..822673347b 100644
--- a/examples/src/main/scala/org/apache/spark/examples/HBaseTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/HBaseTest.scala
@@ -31,12 +31,12 @@ object HBaseTest {
val conf = HBaseConfiguration.create()
// Other options for configuring scan behavior are available. More information available at
// http://hbase.apache.org/apidocs/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html
- conf.set(TableInputFormat.INPUT_TABLE, args(1))
+ conf.set(TableInputFormat.INPUT_TABLE, args(0))
// Initialize hBase table if necessary
val admin = new HBaseAdmin(conf)
- if(!admin.isTableAvailable(args(1))) {
- val tableDesc = new HTableDescriptor(args(1))
+ if (!admin.isTableAvailable(args(0))) {
+ val tableDesc = new HTableDescriptor(args(0))
admin.createTable(tableDesc)
}
diff --git a/examples/src/main/scala/org/apache/spark/examples/HdfsTest.scala b/examples/src/main/scala/org/apache/spark/examples/HdfsTest.scala
index 331de3ad1e..ed2b38e2ca 100644
--- a/examples/src/main/scala/org/apache/spark/examples/HdfsTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/HdfsTest.scala
@@ -19,16 +19,22 @@ package org.apache.spark.examples
import org.apache.spark._
+
object HdfsTest {
+
+ /** Usage: HdfsTest [file] */
def main(args: Array[String]) {
+ if (args.length < 1) {
+ System.err.println("Usage: HdfsTest <file>")
+ System.exit(1)
+ }
val sparkConf = new SparkConf().setAppName("HdfsTest")
val sc = new SparkContext(sparkConf)
- val file = sc.textFile(args(1))
+ val file = sc.textFile(args(0))
val mapped = file.map(s => s.length).cache()
for (iter <- 1 to 10) {
val start = System.currentTimeMillis()
for (x <- mapped) { x + 2 }
- // println("Processing: " + x)
val end = System.currentTimeMillis()
println("Iteration " + iter + " took " + (end-start) + " ms")
}
diff --git a/examples/src/main/scala/org/apache/spark/examples/SparkPageRank.scala b/examples/src/main/scala/org/apache/spark/examples/SparkPageRank.scala
index 40b36c779a..4c7e006da0 100644
--- a/examples/src/main/scala/org/apache/spark/examples/SparkPageRank.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkPageRank.scala
@@ -31,8 +31,12 @@ import org.apache.spark.{SparkConf, SparkContext}
*/
object SparkPageRank {
def main(args: Array[String]) {
+ if (args.length < 1) {
+ System.err.println("Usage: SparkPageRank <file> <iter>")
+ System.exit(1)
+ }
val sparkConf = new SparkConf().setAppName("PageRank")
- var iters = args(1).toInt
+ val iters = if (args.length > 0) args(1).toInt else 10
val ctx = new SparkContext(sparkConf)
val lines = ctx.textFile(args(0), 1)
val links = lines.map{ s =>