diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index 5c4e79cb0499eea6107eeba30d153184ee92774c..ac4b2b035f5c1a69125b78d20b1bcaeb211c06a1 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -132,6 +132,9 @@ class SparkContext(object):
             self._conf = conf
         else:
             self._conf = SparkConf(_jvm=SparkContext._jvm)
+            if conf is not None:
+                for k, v in conf.getAll():
+                    self._conf.set(k, v)
 
         self._batchSize = batchSize  # -1 represents an unlimited batch size
         self._unbatched_serializer = serializer
diff --git a/python/pyspark/tests.py b/python/pyspark/tests.py
index fe314c54a1b188d3aabbf35a39ea4c0ae4159e7b..8e35a4ee8e2d32701c15b62cdde9431d7da2375b 100644
--- a/python/pyspark/tests.py
+++ b/python/pyspark/tests.py
@@ -1970,6 +1970,26 @@ class SparkSubmitTests(unittest.TestCase):
         self.assertEqual(0, proc.returncode)
         self.assertIn("[2, 4, 6]", out.decode('utf-8'))
 
+    def test_user_configuration(self):
+        """Make sure user configuration is respected (SPARK-19307)"""
+        script = self.createTempFile("test.py", """
+            |from pyspark import SparkConf, SparkContext
+            |
+            |conf = SparkConf().set("spark.test_config", "1")
+            |sc = SparkContext(conf = conf)
+            |try:
+            |    if sc._conf.get("spark.test_config") != "1":
+            |        raise Exception("Cannot find spark.test_config in SparkContext's conf.")
+            |finally:
+            |    sc.stop()
+            """)
+        proc = subprocess.Popen(
+            [self.sparkSubmit, "--master", "local", script],
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT)
+        out, err = proc.communicate()
+        self.assertEqual(0, proc.returncode, msg="Process failed with error:\n {0}".format(out))
+
 
 
 class ContextTests(unittest.TestCase):