diff --git a/python/pyspark/sql/column.py b/python/pyspark/sql/column.py
index 43e9baece2de99507e8117fa815da1df858c845d..90fb76f9b5a5b839463fa5bed53eebc314be6715 100644
--- a/python/pyspark/sql/column.py
+++ b/python/pyspark/sql/column.py
@@ -418,8 +418,6 @@ class Column(object):
         >>> window = Window.partitionBy("name").orderBy("age").rowsBetween(-1, 1)
         >>> from pyspark.sql.functions import rank, min
         >>> # df.select(rank().over(window), min('age').over(window))
-
-        .. note:: Window functions is only supported with HiveContext in 1.4
         """
         from pyspark.sql.window import WindowSpec
         if not isinstance(window, WindowSpec):
diff --git a/python/pyspark/sql/context.py b/python/pyspark/sql/context.py
index 2096236d7f36fbf34414d8fd206a9f89b5979faf..78ab2e81bfce2d767ba4015f1f69d699021af96a 100644
--- a/python/pyspark/sql/context.py
+++ b/python/pyspark/sql/context.py
@@ -17,6 +17,7 @@
 
 from __future__ import print_function
 import sys
+import warnings
 
 if sys.version >= '3':
     basestring = unicode = str
@@ -434,7 +435,6 @@ class SQLContext(object):
         return ContinuousQueryManager(self._ssql_ctx.streams())
 
 
-# TODO(andrew): deprecate this
 class HiveContext(SQLContext):
     """A variant of Spark SQL that integrates with data stored in Hive.
 
@@ -444,8 +444,26 @@ class HiveContext(SQLContext):
     :param sparkContext: The SparkContext to wrap.
     :param jhiveContext: An optional JVM Scala HiveContext. If set, we do not instantiate a new
         :class:`HiveContext` in the JVM, instead we make all calls to this object.
+
+    .. note:: Deprecated in 2.0.0. Use SparkSession.builder.enableHiveSupport().getOrCreate().
     """
 
+    @classmethod
+    def _createForTesting(cls, sparkContext):
+        """(Internal use only) Create a new HiveContext for testing.
+
+        All test code that touches HiveContext *must* go through this method. Otherwise,
+        you may end up launching multiple derby instances and encounter with incredibly
+        confusing error messages.
+        """
+        jsc = sparkContext._jsc.sc()
+        jtestHive = sparkContext._jvm.org.apache.spark.sql.hive.test.TestHiveContext(jsc)
+        return cls(sparkContext, jtestHive)
+
     def __init__(self, sparkContext, jhiveContext=None):
+        warnings.warn(
+            "HiveContext is deprecated in Spark 2.0.0. Please use " +
+            "SparkSession.builder.enableHiveSupport().getOrCreate() instead.",
+            DeprecationWarning)
         if jhiveContext is None:
             sparkSession = SparkSession.withHiveSupport(sparkContext)
diff --git a/python/pyspark/sql/streaming.py b/python/pyspark/sql/streaming.py
index bf03fdca913945eb8b312759a32eca5cf253840c..8238b8e7cde6b96be7b5e5858aa6090a65a25a0b 100644
--- a/python/pyspark/sql/streaming.py
+++ b/python/pyspark/sql/streaming.py
@@ -221,7 +221,7 @@ def _test():
     globs['os'] = os
     globs['sc'] = sc
     globs['sqlContext'] = SQLContext(sc)
-    globs['hiveContext'] = HiveContext(sc)
+    globs['hiveContext'] = HiveContext._createForTesting(sc)
     globs['df'] = \
         globs['sqlContext'].read.format('text').stream('python/test_support/sql/streaming')