diff options
author | Reynold Xin <rxin@cs.berkeley.edu> | 2013-05-09 14:20:01 -0700 |
---|---|---|
committer | Reynold Xin <rxin@cs.berkeley.edu> | 2013-05-09 14:20:01 -0700 |
commit | 012c9e5ab072239e07202abe4775b434be6e32b9 (patch) | |
tree | dda701387d3725199faf4c3a56dd57c30ca5f364 /examples/src | |
parent | 42bbe89f04383cd6cc182fc3627ce55e11ede33f (diff) | |
download | spark-012c9e5ab072239e07202abe4775b434be6e32b9.tar.gz spark-012c9e5ab072239e07202abe4775b434be6e32b9.tar.bz2 spark-012c9e5ab072239e07202abe4775b434be6e32b9.zip |
Revert "Merge pull request #596 from esjewett/master" because the
dependency on hbase introduces netty-3.2.2 which conflicts with
netty-3.5.3 already in Spark. This caused multiple test failures.
This reverts commit 0f1b7a06e1f6782711170234f105f1b277e3b04c, reversing
changes made to aacca1b8a85bd073ce185a06d6470b070761b2f4.
Diffstat (limited to 'examples/src')
-rw-r--r-- | examples/src/main/scala/spark/examples/HBaseTest.scala | 35 |
1 file changed, 0 insertions, 35 deletions
diff --git a/examples/src/main/scala/spark/examples/HBaseTest.scala b/examples/src/main/scala/spark/examples/HBaseTest.scala deleted file mode 100644 index 9bad876860..0000000000 --- a/examples/src/main/scala/spark/examples/HBaseTest.scala +++ /dev/null @@ -1,35 +0,0 @@ -package spark.examples - -import spark._ -import spark.rdd.NewHadoopRDD -import org.apache.hadoop.hbase.{HBaseConfiguration, HTableDescriptor} -import org.apache.hadoop.hbase.client.HBaseAdmin -import org.apache.hadoop.hbase.mapreduce.TableInputFormat - -object HBaseTest { - def main(args: Array[String]) { - val sc = new SparkContext(args(0), "HBaseTest", - System.getenv("SPARK_HOME"), Seq(System.getenv("SPARK_EXAMPLES_JAR"))) - - val conf = HBaseConfiguration.create() - - // Other options for configuring scan behavior are available. More information available at - // http://hbase.apache.org/apidocs/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html - conf.set(TableInputFormat.INPUT_TABLE, args(1)) - - // Initialize hBase table if necessary - val admin = new HBaseAdmin(conf) - if(!admin.isTableAvailable(args(1))) { - val tableDesc = new HTableDescriptor(args(1)) - admin.createTable(tableDesc) - } - - val hBaseRDD = sc.newAPIHadoopRDD(conf, classOf[TableInputFormat], - classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable], - classOf[org.apache.hadoop.hbase.client.Result]) - - hBaseRDD.count() - - System.exit(0) - } -} |