diff --git a/sbin/compute-classpath.cmd b/bin/compute-classpath.cmd
similarity index 100%
rename from sbin/compute-classpath.cmd
rename to bin/compute-classpath.cmd
diff --git a/sbin/compute-classpath.sh b/bin/compute-classpath.sh
similarity index 100%
rename from sbin/compute-classpath.sh
rename to bin/compute-classpath.sh
diff --git a/bin/run-example b/bin/run-example
index f2699c38a9a0a3cdff7b8b75557e1174977d0467..6c5d4a6a8f3641223598970d6c450468baf61ecd 100755
--- a/bin/run-example
+++ b/bin/run-example
@@ -61,7 +61,7 @@ fi
 
 # Since the examples JAR ideally shouldn't include spark-core (that dependency should be
 # "provided"), also add our standard Spark classpath, built using compute-classpath.sh.
-CLASSPATH=`$FWDIR/sbin/compute-classpath.sh`
+CLASSPATH=`$FWDIR/bin/compute-classpath.sh`
 CLASSPATH="$SPARK_EXAMPLES_JAR:$CLASSPATH"
 
 if $cygwin; then
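
Aside on the "provided" comment in the hunk above: the examples JAR is meant to exclude spark-core so that compute-classpath.sh supplies it at run time. A minimal, hypothetical build.sbt sketch of that dependency scoping follows; the project and version string are illustrative assumptions, not taken from this diff.

    // build.sbt (hypothetical downstream project, sketch only)
    // "provided" keeps spark-core out of the assembled JAR; the runtime
    // classpath built by compute-classpath.sh supplies it instead.
    libraryDependencies += "org.apache.spark" %% "spark-core" % "0.9.0-incubating" % "provided"
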
diff --git a/bin/spark-class b/bin/spark-class
index 4e440d8729420ddefd03f04be0f69cb07084f7dc..c4225a392d6dae2374465f6a6f1f37c3ea2f6cee 100755
--- a/bin/spark-class
+++ b/bin/spark-class
@@ -128,7 +128,7 @@ if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
 fi
 
 # Compute classpath using external script
-CLASSPATH=`$FWDIR/sbin/compute-classpath.sh`
+CLASSPATH=`$FWDIR/bin/compute-classpath.sh`
 
 if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
   CLASSPATH="$CLASSPATH:$SPARK_TOOLS_JAR"
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
index d4084820f68e02151c770ef579a1ab4da06c07d1..fff9cb60c78498b2643af10a311c63b3b85607bb 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
@@ -122,7 +122,7 @@ private[spark] class ExecutorRunner(
     // Figure out our classpath with the external compute-classpath script
     val ext = if (System.getProperty("os.name").startsWith("Windows")) ".cmd" else ".sh"
     val classPath = Utils.executeAndGetOutput(
-        Seq(sparkHome + "/sbin/compute-classpath" + ext),
+        Seq(sparkHome + "/bin/compute-classpath" + ext),
         extraEnvironment=appDesc.command.environment)
 
     Seq("-cp", classPath) ++ libraryOpts ++ workerLocalOpts ++ userOpts ++ memoryOpts