diff --git a/LICENSE b/LICENSE
index 65e1f480d9b14740ad8a5eed0e1329dcbef85934..76a3601c66918a4ad76e26eaf102588dc47676ab 100644
--- a/LICENSE
+++ b/LICENSE
@@ -272,7 +272,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 
 ========================================================================
-For Py4J (python/lib/py4j0.7.egg and files in assembly/lib/net/sf/py4j):
+For Py4J (python/lib/py4j-0.8.2.1-src.zip):
 ========================================================================
 
 Copyright (c) 2009-2011, Barthelemy Dagenais All rights reserved.
@@ -532,7 +532,7 @@ The following components are provided under a BSD-style license. See project lin
      (New BSD license) Protocol Buffer Java API (org.spark-project.protobuf:protobuf-java:2.4.1-shaded - http://code.google.com/p/protobuf)
      (The BSD License) Fortran to Java ARPACK (net.sourceforge.f2j:arpack_combined_all:0.1 - http://f2j.sourceforge.net)
      (The BSD License) xmlenc Library (xmlenc:xmlenc:0.52 - http://xmlenc.sourceforge.net)
-     (The New BSD License) Py4J (net.sf.py4j:py4j:0.8.1 - http://py4j.sourceforge.net/)
+     (The New BSD License) Py4J (net.sf.py4j:py4j:0.8.2.1 - http://py4j.sourceforge.net/)
      (Two-clause BSD-style license) JUnit-Interface (com.novocode:junit-interface:0.10 - http://github.com/szeiger/junit-interface/)
      (ISC/BSD License) jbcrypt (org.mindrot:jbcrypt:0.3m - http://www.mindrot.org/)
 
diff --git a/bin/pyspark b/bin/pyspark
index 69b056fe28f2cce9c3ba42a9b7f26d7764894eee..39a20e2a24a3cd2302475d7b3aeea7395bf83bff 100755
--- a/bin/pyspark
+++ b/bin/pyspark
@@ -52,7 +52,7 @@ export PYSPARK_PYTHON
 
 # Add the PySpark classes to the Python path:
 export PYTHONPATH=$SPARK_HOME/python/:$PYTHONPATH
-export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.1-src.zip:$PYTHONPATH
+export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH
 
 # Load the PySpark shell.py script when ./pyspark is used interactively:
 export OLD_PYTHONSTARTUP=$PYTHONSTARTUP
diff --git a/bin/pyspark2.cmd b/bin/pyspark2.cmd
index 0ef9eea95342e36899485bdfd7b689cf1113bc40..2c4b08af8d4c318ff7cc31147a81a259c8a20a81 100644
--- a/bin/pyspark2.cmd
+++ b/bin/pyspark2.cmd
@@ -45,7 +45,7 @@ rem Figure out which Python to use.
 if [%PYSPARK_PYTHON%] == [] set PYSPARK_PYTHON=python
 
 set PYTHONPATH=%FWDIR%python;%PYTHONPATH%
-set PYTHONPATH=%FWDIR%python\lib\py4j-0.8.1-src.zip;%PYTHONPATH%
+set PYTHONPATH=%FWDIR%python\lib\py4j-0.8.2.1-src.zip;%PYTHONPATH%
 
 set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
 set PYTHONSTARTUP=%FWDIR%python\pyspark\shell.py
diff --git a/core/pom.xml b/core/pom.xml
index a24743495b0e13c908a672fb420253d28ed79a17..4f061099a477dbf4bfc068c5b9f8dcf600197bdc 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -275,7 +275,7 @@
     <dependency>
       <groupId>net.sf.py4j</groupId>
       <artifactId>py4j</artifactId>
-      <version>0.8.1</version>
+      <version>0.8.2.1</version>
     </dependency>
   </dependencies>
   <build>
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
index 6d3e257c4d5df98da2b505e2cd4c099d72826ce4..52c70712eea3dc758bd281a3ee51451bac8394c1 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala
@@ -29,7 +29,7 @@ private[spark] object PythonUtils {
     val pythonPath = new ArrayBuffer[String]
     for (sparkHome <- sys.env.get("SPARK_HOME")) {
       pythonPath += Seq(sparkHome, "python").mkString(File.separator)
-      pythonPath += Seq(sparkHome, "python", "lib", "py4j-0.8.1-src.zip").mkString(File.separator)
+      pythonPath += Seq(sparkHome, "python", "lib", "py4j-0.8.2.1-src.zip").mkString(File.separator)
     }
     pythonPath ++= SparkContext.jarOfObject(this)
     pythonPath.mkString(File.pathSeparator)
diff --git a/python/lib/py4j-0.8.1-src.zip b/python/lib/py4j-0.8.1-src.zip
deleted file mode 100644
index 2069a328d1f2e6a94df057c6a3930048ae3f3832..0000000000000000000000000000000000000000
Binary files a/python/lib/py4j-0.8.1-src.zip and /dev/null differ
diff --git a/python/lib/py4j-0.8.2.1-src.zip b/python/lib/py4j-0.8.2.1-src.zip
new file mode 100644
index 0000000000000000000000000000000000000000..5203b84d9119ea5415114939624411ba7d3daa8a
Binary files /dev/null and b/python/lib/py4j-0.8.2.1-src.zip differ
diff --git a/sbin/spark-config.sh b/sbin/spark-config.sh
index 147b506dd5ca31cf10bd529bfc18a2b607c6b8b1..5c87da5815b6446a4062f3d5049b5ccd8a274496 100755
--- a/sbin/spark-config.sh
+++ b/sbin/spark-config.sh
@@ -36,4 +36,4 @@ export SPARK_HOME=${SPARK_PREFIX}
 export SPARK_CONF_DIR="$SPARK_HOME/conf"
 # Add the PySpark classes to the PYTHONPATH:
 export PYTHONPATH=$SPARK_HOME/python:$PYTHONPATH
-export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.1-src.zip:$PYTHONPATH
+export PYTHONPATH=$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH
diff --git a/sbin/spark-executor b/sbin/spark-executor
index 336549f29c9ce679142c2fd14dfa5e9ef2b5a963..3621321a9bc8df014e20ac31ff949920658cb387 100755
--- a/sbin/spark-executor
+++ b/sbin/spark-executor
@@ -20,7 +20,7 @@
 FWDIR="$(cd `dirname $0`/..; pwd)"
 
 export PYTHONPATH=$FWDIR/python:$PYTHONPATH
-export PYTHONPATH=$FWDIR/python/lib/py4j-0.8.1-src.zip:$PYTHONPATH
+export PYTHONPATH=$FWDIR/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH
 
 echo "Running spark-executor with framework dir = $FWDIR"
 exec $FWDIR/bin/spark-class org.apache.spark.executor.MesosExecutorBackend
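
A quick sanity check that the upgraded archive is actually the Py4J that Python
resolves after these PYTHONPATH changes (a minimal sketch, not part of the patch;
the /path/to/spark location is an example, and reading py4j.version is an
assumption about the 0.8.2.1 source layout):

    # Point SPARK_HOME at your checkout (example path, adjust as needed)
    export SPARK_HOME=/path/to/spark
    # Mirror the PYTHONPATH set up by bin/pyspark and sbin/spark-config.sh
    export PYTHONPATH=$SPARK_HOME/python:$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH
    # Should print 0.8.2.1 if the bundled zip is picked up first
    python -c "from py4j.version import __version__; print(__version__)"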