From 939a322c85956eda150b10afb2ed1d8d959a7bdf Mon Sep 17 00:00:00 2001
From: Matthew Rocklin <mrocklin@gmail.com>
Date: Mon, 8 Sep 2014 15:45:28 -0700
Subject: [PATCH] [SPARK-3417] Use new-style classes in PySpark

Tiny PR making SQLContext a new-style class. This allows various type logic to work more effectively

```Python
In [1]: import pyspark

In [2]: pyspark.sql.SQLContext.mro()
Out[2]: [pyspark.sql.SQLContext, object]
```

Author: Matthew Rocklin <mrocklin@gmail.com>

Closes #2288 from mrocklin/sqlcontext-new-style-class and squashes the following commits:

4aadab6 [Matthew Rocklin] update other old-style classes
a2dc02f [Matthew Rocklin] pyspark.sql.SQLContext is new-style class
---
 python/pyspark/mllib/random.py | 2 +-
 python/pyspark/mllib/util.py   | 2 +-
 python/pyspark/sql.py          | 2 +-
 python/pyspark/storagelevel.py | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/python/pyspark/mllib/random.py b/python/pyspark/mllib/random.py
index 3e59c73db8..d53c95fd59 100644
--- a/python/pyspark/mllib/random.py
+++ b/python/pyspark/mllib/random.py
@@ -28,7 +28,7 @@ from pyspark.serializers import NoOpSerializer
 __all__ = ['RandomRDDs', ]
 
 
-class RandomRDDs:
+class RandomRDDs(object):
     """
     Generator methods for creating RDDs comprised of i.i.d samples from
     some distribution.
diff --git a/python/pyspark/mllib/util.py b/python/pyspark/mllib/util.py
index 4962d05491..1c7b8c809a 100644
--- a/python/pyspark/mllib/util.py
+++ b/python/pyspark/mllib/util.py
@@ -25,7 +25,7 @@ from pyspark.rdd import RDD
 from pyspark.serializers import NoOpSerializer
 
 
-class MLUtils:
+class MLUtils(object):
     """
     Helper methods to load, save and pre-process data used in MLlib.
 
diff --git a/python/pyspark/sql.py b/python/pyspark/sql.py
index 004d4937cb..53eea6d6cf 100644
--- a/python/pyspark/sql.py
+++ b/python/pyspark/sql.py
@@ -899,7 +899,7 @@ def _create_cls(dataType):
     return Row
 
 
-class SQLContext:
+class SQLContext(object):
     """Main entry point for Spark SQL functionality.
 
diff --git a/python/pyspark/storagelevel.py b/python/pyspark/storagelevel.py
index 2aa0fb9d2c..676aa0f714 100644
--- a/python/pyspark/storagelevel.py
+++ b/python/pyspark/storagelevel.py
@@ -18,7 +18,7 @@
 __all__ = ["StorageLevel"]
 
 
-class StorageLevel:
+class StorageLevel(object):
     """
     Flags for controlling the storage of an RDD. Each StorageLevel records whether to use memory,
--
GitLab
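
For context, here is a minimal standalone sketch (not part of the patch; the `OldStyle` and `NewStyle` names are hypothetical) of the Python 2 behavior this change addresses: a class with no explicit `object` base is "old-style" and lacks `mro()` and the rest of the `type` machinery, while a class deriving from `object` supports it, which is what the `SQLContext.mro()` example in the commit message relies on.

```python
# Sketch of old-style vs. new-style classes under Python 2
# (hypothetical class names; under Python 3 every class is new-style).

class OldStyle:              # old-style: no `object` base (Python 2 only)
    pass


class NewStyle(object):      # new-style: inherits from `object`
    pass


print(NewStyle.mro())        # [<class '__main__.NewStyle'>, <type 'object'>]

try:
    OldStyle.mro()           # old-style classes have no mro() method
except AttributeError as exc:
    print("old-style class: %s" % exc)
```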