From 19fdb21afbf0eae4483cf6d4ef32daffd1994b89 Mon Sep 17 00:00:00 2001
From: Jeff Zhang <zjffdu@apache.org>
Date: Tue, 26 Jan 2016 14:58:39 -0800
Subject: [PATCH] [SPARK-12993][PYSPARK] Remove usage of ADD_FILES in pyspark

The environment variable ADD_FILES was introduced for adding Python files to the SparkContext so that they are distributed to executors (SPARK-865); it is deprecated now. Users are encouraged to use --py-files for adding Python files instead.
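
As a rough sketch of the migration (file names below are only illustrative, not from this patch):

    # deprecated: files picked up from the ADD_FILES environment variable
    ADD_FILES="deps.py,libs.zip" ./bin/pyspark

    # preferred: pass the same comma-separated list via --py-files
    ./bin/pyspark --py-files deps.py,libs.zip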

Author: Jeff Zhang <zjffdu@apache.org>

Closes #10913 from zjffdu/SPARK-12993.
---
 python/pyspark/shell.py | 11 +----------
 1 file changed, 1 insertion(+), 10 deletions(-)

diff --git a/python/pyspark/shell.py b/python/pyspark/shell.py
index 26cafca8b8..7c37f75193 100644
--- a/python/pyspark/shell.py
+++ b/python/pyspark/shell.py
@@ -32,15 +32,10 @@ from pyspark.context import SparkContext
 from pyspark.sql import SQLContext, HiveContext
 from pyspark.storagelevel import StorageLevel
 
-# this is the deprecated equivalent of ADD_JARS
-add_files = None
-if os.environ.get("ADD_FILES") is not None:
-    add_files = os.environ.get("ADD_FILES").split(',')
-
 if os.environ.get("SPARK_EXECUTOR_URI"):
     SparkContext.setSystemProperty("spark.executor.uri", os.environ["SPARK_EXECUTOR_URI"])
 
-sc = SparkContext(pyFiles=add_files)
+sc = SparkContext()
 atexit.register(lambda: sc.stop())
 
 try:
@@ -68,10 +63,6 @@ print("Using Python version %s (%s, %s)" % (
     platform.python_build()[1]))
 print("SparkContext available as sc, %s available as sqlContext." % sqlContext.__class__.__name__)
 
-if add_files is not None:
-    print("Warning: ADD_FILES environment variable is deprecated, use --py-files argument instead")
-    print("Adding files: [%s]" % ", ".join(add_files))
-
 # The ./bin/pyspark script stores the old PYTHONSTARTUP value in OLD_PYTHONSTARTUP,
 # which allows us to execute the user's PYTHONSTARTUP file:
 _pythonstartup = os.environ.get('OLD_PYTHONSTARTUP')
-- 
GitLab