From 9c297df3d4d5fa4bbfdffdaad15f362586db384b Mon Sep 17 00:00:00 2001
From: Bryan Cutler <cutlerb@gmail.com>
Date: Wed, 25 May 2016 14:29:14 -0700
Subject: [PATCH] [MINOR] [PYSPARK] [EXAMPLES] Changed examples to use
 SparkSession.sparkContext instead of _sc

## What changes were proposed in this pull request?

Some PySpark examples need a SparkContext and obtain it by reaching into the private `_sc` attribute of the session. These examples should use the public `sparkContext` property provided by `SparkSession` instead.
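
For illustration, a minimal sketch of the before/after pattern (the app name and the toy RDD below are placeholders, not taken from the patched examples):

```python
from pyspark.sql import SparkSession

spark = SparkSession.builder\
    .appName("Example")\
    .getOrCreate()

# Before: reaches into the session's private attribute
# sc = spark._sc

# After: uses the public property, which exposes the same SparkContext
sc = spark.sparkContext

print(sc.parallelize(range(10)).count())

spark.stop()
```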

## How was this patch tested?
Ran the modified examples.

Author: Bryan Cutler <cutlerb@gmail.com>

Closes #13303 from BryanCutler/pyspark-session-sparkContext-MINOR.
---
 examples/src/main/python/als.py                 | 2 +-
 examples/src/main/python/avro_inputformat.py    | 2 +-
 examples/src/main/python/parquet_inputformat.py | 2 +-
 examples/src/main/python/pi.py                  | 2 +-
 examples/src/main/python/transitive_closure.py  | 2 +-
 5 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/examples/src/main/python/als.py b/examples/src/main/python/als.py
index 81562e20a9..80290e7de9 100755
--- a/examples/src/main/python/als.py
+++ b/examples/src/main/python/als.py
@@ -67,7 +67,7 @@ if __name__ == "__main__":
         .appName("PythonALS")\
         .getOrCreate()
 
-    sc = spark._sc
+    sc = spark.sparkContext
 
     M = int(sys.argv[1]) if len(sys.argv) > 1 else 100
     U = int(sys.argv[2]) if len(sys.argv) > 2 else 500
diff --git a/examples/src/main/python/avro_inputformat.py b/examples/src/main/python/avro_inputformat.py
index 3f65e8f79a..4422f9e7a9 100644
--- a/examples/src/main/python/avro_inputformat.py
+++ b/examples/src/main/python/avro_inputformat.py
@@ -70,7 +70,7 @@ if __name__ == "__main__":
         .appName("AvroKeyInputFormat")\
         .getOrCreate()
 
-    sc = spark._sc
+    sc = spark.sparkContext
 
     conf = None
     if len(sys.argv) == 3:
diff --git a/examples/src/main/python/parquet_inputformat.py b/examples/src/main/python/parquet_inputformat.py
index 2f09f4d573..29a1ac274e 100644
--- a/examples/src/main/python/parquet_inputformat.py
+++ b/examples/src/main/python/parquet_inputformat.py
@@ -53,7 +53,7 @@ if __name__ == "__main__":
         .appName("ParquetInputFormat")\
         .getOrCreate()
 
-    sc = spark._sc
+    sc = spark.sparkContext
 
     parquet_rdd = sc.newAPIHadoopFile(
         path,
diff --git a/examples/src/main/python/pi.py b/examples/src/main/python/pi.py
index 5db03e4a21..b39d710540 100755
--- a/examples/src/main/python/pi.py
+++ b/examples/src/main/python/pi.py
@@ -32,7 +32,7 @@ if __name__ == "__main__":
         .appName("PythonPi")\
         .getOrCreate()
 
-    sc = spark._sc
+    sc = spark.sparkContext
 
     partitions = int(sys.argv[1]) if len(sys.argv) > 1 else 2
     n = 100000 * partitions
diff --git a/examples/src/main/python/transitive_closure.py b/examples/src/main/python/transitive_closure.py
index 37c41dcd03..d88ea94e41 100755
--- a/examples/src/main/python/transitive_closure.py
+++ b/examples/src/main/python/transitive_closure.py
@@ -46,7 +46,7 @@ if __name__ == "__main__":
         .appName("PythonTransitiveClosure")\
         .getOrCreate()
 
-    sc = spark._sc
+    sc = spark.sparkContext
 
     partitions = int(sys.argv[1]) if len(sys.argv) > 1 else 2
     tc = sc.parallelize(generateGraph(), partitions).cache()
-- 
GitLab