diff --git a/python/pyspark/shell.py b/python/pyspark/shell.py
index 54ff1bf8e7c3ea63db72a5bd9015e26705948b9d..c8297b662e36b820da84dfd308eb747ce4923e8d 100644
--- a/python/pyspark/shell.py
+++ b/python/pyspark/shell.py
@@ -7,10 +7,15 @@ import os
 import pyspark
 from pyspark.context import SparkContext
 
+# this is the equivalent of ADD_JARS
+add_files = os.environ.get("ADD_FILES").split(',') if os.environ.get("ADD_FILES") is not None else None
 
-sc = SparkContext(os.environ.get("MASTER", "local"), "PySparkShell")
+sc = SparkContext(os.environ.get("MASTER", "local"), "PySparkShell", pyFiles=add_files)
 print "Spark context avaiable as sc."
 
+if add_files is not None:
+    print "Adding files: [%s]" % ", ".join(add_files)
+
 # The ./pyspark script stores the old PYTHONSTARTUP value in OLD_PYTHONSTARTUP,
 # which allows us to execute the user's PYTHONSTARTUP file:
 _pythonstartup = os.environ.get('OLD_PYTHONSTARTUP')