authorBryan Cutler <cutlerb@gmail.com>2016-05-25 14:29:14 -0700
committerDavies Liu <davies.liu@gmail.com>2016-05-25 14:29:14 -0700
commit9c297df3d4d5fa4bbfdffdaad15f362586db384b (patch)
tree37a39991a44fb0fb0fcb3ea7a841d4b399800468 /examples
parent698ef762f80cf4c84bc7b7cf083aa97d44b87170 (diff)
[MINOR] [PYSPARK] [EXAMPLES] Changed examples to use SparkSession.sparkContext instead of _sc
## What changes were proposed in this pull request?

Some PySpark examples need a SparkContext and get it by accessing `_sc` directly from the session. These examples should use the provided property `sparkContext` in `SparkSession` instead.

## How was this patch tested?

Ran modified examples.

Author: Bryan Cutler <cutlerb@gmail.com>

Closes #13303 from BryanCutler/pyspark-session-sparkContext-MINOR.
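For context, the change swaps the internal attribute for the public property. A minimal sketch of the recommended pattern follows; the app name and the `parallelize` call are illustrative, not part of the patch:

```python
from pyspark.sql import SparkSession

spark = SparkSession\
    .builder\
    .appName("ExampleApp")\
    .getOrCreate()

# Before: sc = spark._sc   (reaches into an internal attribute)
# After:  use the public property exposed by SparkSession
sc = spark.sparkContext

# Any RDD work proceeds as before through the retrieved SparkContext
rdd = sc.parallelize(range(10))
print(rdd.count())

spark.stop()
```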
Diffstat (limited to 'examples')
-rwxr-xr-x  examples/src/main/python/als.py                 | 2 +-
-rw-r--r--  examples/src/main/python/avro_inputformat.py    | 2 +-
-rw-r--r--  examples/src/main/python/parquet_inputformat.py | 2 +-
-rwxr-xr-x  examples/src/main/python/pi.py                  | 2 +-
-rwxr-xr-x  examples/src/main/python/transitive_closure.py  | 2 +-
5 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/examples/src/main/python/als.py b/examples/src/main/python/als.py
index 81562e20a9..80290e7de9 100755
--- a/examples/src/main/python/als.py
+++ b/examples/src/main/python/als.py
@@ -67,7 +67,7 @@ if __name__ == "__main__":
.appName("PythonALS")\
.getOrCreate()
- sc = spark._sc
+ sc = spark.sparkContext
M = int(sys.argv[1]) if len(sys.argv) > 1 else 100
U = int(sys.argv[2]) if len(sys.argv) > 2 else 500
diff --git a/examples/src/main/python/avro_inputformat.py b/examples/src/main/python/avro_inputformat.py
index 3f65e8f79a..4422f9e7a9 100644
--- a/examples/src/main/python/avro_inputformat.py
+++ b/examples/src/main/python/avro_inputformat.py
@@ -70,7 +70,7 @@ if __name__ == "__main__":
.appName("AvroKeyInputFormat")\
.getOrCreate()
- sc = spark._sc
+ sc = spark.sparkContext
conf = None
if len(sys.argv) == 3:
diff --git a/examples/src/main/python/parquet_inputformat.py b/examples/src/main/python/parquet_inputformat.py
index 2f09f4d573..29a1ac274e 100644
--- a/examples/src/main/python/parquet_inputformat.py
+++ b/examples/src/main/python/parquet_inputformat.py
@@ -53,7 +53,7 @@ if __name__ == "__main__":
.appName("ParquetInputFormat")\
.getOrCreate()
- sc = spark._sc
+ sc = spark.sparkContext
parquet_rdd = sc.newAPIHadoopFile(
path,
diff --git a/examples/src/main/python/pi.py b/examples/src/main/python/pi.py
index 5db03e4a21..b39d710540 100755
--- a/examples/src/main/python/pi.py
+++ b/examples/src/main/python/pi.py
@@ -32,7 +32,7 @@ if __name__ == "__main__":
.appName("PythonPi")\
.getOrCreate()
- sc = spark._sc
+ sc = spark.sparkContext
partitions = int(sys.argv[1]) if len(sys.argv) > 1 else 2
n = 100000 * partitions
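As a usage reference, this is roughly how pi.py reads after the change; only the hunk above is from the patch, so the Monte Carlo body outside it is reconstructed and may differ slightly from the file:

```python
from __future__ import print_function

import sys
from random import random
from operator import add

from pyspark.sql import SparkSession

if __name__ == "__main__":
    spark = SparkSession\
        .builder\
        .appName("PythonPi")\
        .getOrCreate()

    # Public property instead of the internal _sc attribute
    sc = spark.sparkContext

    partitions = int(sys.argv[1]) if len(sys.argv) > 1 else 2
    n = 100000 * partitions

    def f(_):
        # Sample a point in the unit square and test if it falls in the circle
        x = random() * 2 - 1
        y = random() * 2 - 1
        return 1 if x ** 2 + y ** 2 < 1 else 0

    count = sc.parallelize(range(1, n + 1), partitions).map(f).reduce(add)
    print("Pi is roughly %f" % (4.0 * count / n))

    spark.stop()
```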
diff --git a/examples/src/main/python/transitive_closure.py b/examples/src/main/python/transitive_closure.py
index 37c41dcd03..d88ea94e41 100755
--- a/examples/src/main/python/transitive_closure.py
+++ b/examples/src/main/python/transitive_closure.py
@@ -46,7 +46,7 @@ if __name__ == "__main__":
.appName("PythonTransitiveClosure")\
.getOrCreate()
- sc = spark._sc
+ sc = spark.sparkContext
partitions = int(sys.argv[1]) if len(sys.argv) > 1 else 2
tc = sc.parallelize(generateGraph(), partitions).cache()