diff options
author | Nicholas Chammas <nicholas.chammas@gmail.com> | 2014-09-05 23:08:54 -0700 |
---|---|---|
committer | Reynold Xin <rxin@apache.org> | 2014-09-05 23:08:54 -0700 |
commit | 9422c4ee0eaf4a32d2ed7c96799feac2f5f79d40 (patch) | |
tree | 53000806a143eac041be4ad0f84a137f93e43bd3 /examples/src/main/python/hbase_outputformat.py | |
parent | 19f61c165932059e7ce156da2c71429fa8dc27f0 (diff) | |
download | spark-9422c4ee0eaf4a32d2ed7c96799feac2f5f79d40.tar.gz spark-9422c4ee0eaf4a32d2ed7c96799feac2f5f79d40.tar.bz2 spark-9422c4ee0eaf4a32d2ed7c96799feac2f5f79d40.zip |
[SPARK-3361] Expand PEP 8 checks to include EC2 script and Python examples
This PR resolves [SPARK-3361](https://issues.apache.org/jira/browse/SPARK-3361) by expanding the PEP 8 checks to cover the remaining Python code base:
* The EC2 script
* All Python / PySpark examples
Author: Nicholas Chammas <nicholas.chammas@gmail.com>
Closes #2297 from nchammas/pep8-rulez and squashes the following commits:
1e5ac9a [Nicholas Chammas] PEP 8 fixes to Python examples
c3dbeff [Nicholas Chammas] PEP 8 fixes to EC2 script
65ef6e8 [Nicholas Chammas] expand PEP 8 checks
Diffstat (limited to 'examples/src/main/python/hbase_outputformat.py')
-rw-r--r-- | examples/src/main/python/hbase_outputformat.py | 18 |
1 file changed, 11 insertions, 7 deletions
diff --git a/examples/src/main/python/hbase_outputformat.py b/examples/src/main/python/hbase_outputformat.py index 49bbc5aebd..abb425b1f8 100644 --- a/examples/src/main/python/hbase_outputformat.py +++ b/examples/src/main/python/hbase_outputformat.py @@ -44,8 +44,10 @@ if __name__ == "__main__": Usage: hbase_outputformat <host> <table> <row> <family> <qualifier> <value> Run with example jar: - ./bin/spark-submit --driver-class-path /path/to/example/jar /path/to/examples/hbase_outputformat.py <args> - Assumes you have created <table> with column family <family> in HBase running on <host> already + ./bin/spark-submit --driver-class-path /path/to/example/jar \ + /path/to/examples/hbase_outputformat.py <args> + Assumes you have created <table> with column family <family> in HBase + running on <host> already """ exit(-1) @@ -55,13 +57,15 @@ if __name__ == "__main__": conf = {"hbase.zookeeper.quorum": host, "hbase.mapred.outputtable": table, - "mapreduce.outputformat.class" : "org.apache.hadoop.hbase.mapreduce.TableOutputFormat", - "mapreduce.job.output.key.class" : "org.apache.hadoop.hbase.io.ImmutableBytesWritable", - "mapreduce.job.output.value.class" : "org.apache.hadoop.io.Writable"} + "mapreduce.outputformat.class": "org.apache.hadoop.hbase.mapreduce.TableOutputFormat", + "mapreduce.job.output.key.class": "org.apache.hadoop.hbase.io.ImmutableBytesWritable", + "mapreduce.job.output.value.class": "org.apache.hadoop.io.Writable"} + keyConv = "org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter" + valueConv = "org.apache.spark.examples.pythonconverters.StringListToPutConverter" sc.parallelize([sys.argv[3:]]).map(lambda x: (x[0], x)).saveAsNewAPIHadoopDataset( conf=conf, - keyConverter="org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter", - valueConverter="org.apache.spark.examples.pythonconverters.StringListToPutConverter") + keyConverter=keyConv, + valueConverter=valueConv) sc.stop() |