From ea662286561aa9fe321cb0a0e10cdeaf60440b90 Mon Sep 17 00:00:00 2001
From: Jeff Zhang <zjffdu@apache.org>
Date: Fri, 2 Sep 2016 10:08:14 -0700
Subject: [PATCH] [SPARK-17261] [PYSPARK] Using HiveContext after re-creating
 SparkContext in Spark 2.0 throws "java.lang.IllegalStateException: Cannot
 call methods on a stopped SparkContext"

## What changes were proposed in this pull request?

Set SparkSession._instantiatedContext to None so that we can recreate the SparkSession again.

## How was this patch tested?

Tested manually using the following command in pyspark shell
```
spark.stop()
spark = SparkSession.builder.enableHiveSupport().getOrCreate()
spark.sql("show databases").show()
```

Author: Jeff Zhang <zjffdu@apache.org>

Closes #14857 from zjffdu/SPARK-17261.
---
 python/pyspark/sql/session.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/python/pyspark/sql/session.py b/python/pyspark/sql/session.py
index 61fa107497..8418abf99c 100644
--- a/python/pyspark/sql/session.py
+++ b/python/pyspark/sql/session.py
@@ -595,6 +595,7 @@ class SparkSession(object):
         """Stop the underlying :class:`SparkContext`.
         """
         self._sc.stop()
+        SparkSession._instantiatedContext = None
 
     @since(2.0)
     def __enter__(self):
-- 
GitLab