diff --git a/.gitignore b/.gitignore
index ad72588b472d6c80b502418224fb598646d7ee12..b54a3058de6590ef7f857373313e910cc85816bf 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,7 +7,7 @@
 sbt/*.jar
 .settings
 .cache
-.mima-excludes
+.generated-mima-excludes
 /build/
 work/
 out/
diff --git a/.rat-excludes b/.rat-excludes
index 15589702c559959a7402f16fe754432979f46790..52b2dfac5cf2b357d2bd7a0e7925204da04d323c 100644
--- a/.rat-excludes
+++ b/.rat-excludes
@@ -3,6 +3,7 @@ target
 .project
 .classpath
 .mima-excludes
+.generated-mima-excludes
 .rat-excludes
 .*md
 derby.log
diff --git a/dev/mima b/dev/mima
new file mode 100755
index 0000000000000000000000000000000000000000..d4099990254ccf563ccb57564d87117fe2891772
--- /dev/null
+++ b/dev/mima
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+set -o pipefail
+
+# Go to the Spark project root directory
+FWDIR="$(cd "$(dirname "$0")"/..; pwd)"
+cd "$FWDIR"
+
+./bin/spark-class org.apache.spark.tools.GenerateMIMAIgnore
+echo -e "q\n" | sbt/sbt mima-report-binary-issues | grep -v -e "info.*Resolving"  # "q" answers sbt's quit prompt so a failed build does not hang
+ret_val=$?
+
+if [ $ret_val != 0 ]; then
+  echo "NOTE: Exceptions to binary compatibility can be added in project/MimaExcludes.scala"
+fi
+
+exit $ret_val
diff --git a/dev/run-tests b/dev/run-tests
index 6043f859ae4633882675127e502c31d78769e537..93d6692f83ca85418ecd745842d1635f19b6e977 100755
--- a/dev/run-tests
+++ b/dev/run-tests
@@ -81,5 +81,4 @@ fi
 echo "========================================================================="
 echo "Detecting binary incompatibilites with MiMa"
 echo "========================================================================="
-./bin/spark-class org.apache.spark.tools.GenerateMIMAIgnore
-echo -e "q\n" | sbt/sbt mima-report-binary-issues | grep -v -e "info.*Resolving" 
+dev/mima
diff --git a/project/MimaBuild.scala b/project/MimaBuild.scala
index 182ca7615de67c3293a1fa334c5dba82b1392420..1477809943573129de4fb30447c83463da082a84 100644
--- a/project/MimaBuild.scala
+++ b/project/MimaBuild.scala
@@ -15,21 +15,41 @@
  * limitations under the License.
  */
 
+import com.typesafe.tools.mima.core.{MissingTypesProblem, MissingClassProblem, ProblemFilters}
+import com.typesafe.tools.mima.core.ProblemFilters._
 import com.typesafe.tools.mima.plugin.MimaKeys.{binaryIssueFilters, previousArtifact}
 import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings
 import sbt._
 
 object MimaBuild {
+  // Exclude a single class and its corresponding object
+  def excludeClass(className: String) = {
+    Seq(
+      excludePackage(className),
+      ProblemFilters.exclude[MissingClassProblem](className),
+      ProblemFilters.exclude[MissingTypesProblem](className),
+      excludePackage(className + "$"),
+      ProblemFilters.exclude[MissingClassProblem](className + "$"),
+      ProblemFilters.exclude[MissingTypesProblem](className + "$")
+    )
+  }
+  // Exclude a Spark class, that is, a class in the package org.apache.spark
+  def excludeSparkClass(className: String) = {
+    excludeClass("org.apache.spark." + className)
+  }
+
+  // Exclude a Spark package, that is, a package under org.apache.spark
+  def excludeSparkPackage(packageName: String) = {
+    excludePackage("org.apache.spark." + packageName)
+  }
 
   def ignoredABIProblems(base: File) = {
-    import com.typesafe.tools.mima.core._
-    import com.typesafe.tools.mima.core.ProblemFilters._
 
     // Excludes placed here will be used for all Spark versions
     val defaultExcludes = Seq()
 
     // Read package-private excludes from file
-    val excludeFilePath = (base.getAbsolutePath + "/.mima-excludes")
+    val excludeFilePath = (base.getAbsolutePath + "/.generated-mima-excludes")
     val excludeFile = file(excludeFilePath)
     val ignoredClasses: Seq[String] =
       if (!excludeFile.exists()) {
@@ -38,31 +58,11 @@ object MimaBuild {
         IO.read(excludeFile).split("\n")
       }
 
-    // Exclude a single class and its corresponding object
-    def excludeClass(className: String) = {
-      Seq(
-        excludePackage(className),
-        ProblemFilters.exclude[MissingClassProblem](className),
-        ProblemFilters.exclude[MissingTypesProblem](className),
-        excludePackage(className + "$"),
-        ProblemFilters.exclude[MissingClassProblem](className + "$"),
-        ProblemFilters.exclude[MissingTypesProblem](className + "$")
-      )
-    }
-
-    // Exclude a Spark class, that is in the package org.apache.spark
-    def excludeSparkClass(className: String) = {
-      excludeClass("org.apache.spark." + className)
-    }
 
-    // Exclude a Spark package, that is in the package org.apache.spark
-    def excludeSparkPackage(packageName: String) = {
-      excludePackage("org.apache.spark." + packageName)
-    }
 
     val externalExcludeFileClasses = ignoredClasses.flatMap(excludeClass)
 
-    defaultExcludes ++ externalExcludeFileClasses
+    defaultExcludes ++ externalExcludeFileClasses ++ MimaExcludes.excludes
   }
 
   def mimaSettings(sparkHome: File) = mimaDefaultSettings ++ Seq(
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
new file mode 100644
index 0000000000000000000000000000000000000000..ecb389de5558fea82eb6dddf21b11deb3a149f75
--- /dev/null
+++ b/project/MimaExcludes.scala
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import com.typesafe.tools.mima.core._
+import com.typesafe.tools.mima.core.ProblemFilters._
+
+/**
+ * Additional excludes for checking Spark's binary compatibility.
+ *
+ * The MiMa build automatically excludes @DeveloperApi and @Experimental classes. This file
+ * acts as an official audit of cases where we exclude other classes. Please use the narrowest
+ * possible exclude here; MiMa will usually tell you which exclude to use, e.g.:
+ *
+ * ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.rdd.RDD.take")
+ *
+ * It is also possible to exclude Spark classes and packages. This should be used sparingly:
+ *
+ * MimaBuild.excludeSparkClass("graphx.util.collection.GraphXPrimitiveKeyOpenHashMap")
+ */
+object MimaExcludes {
+  val excludes =
+    SparkBuild.SPARK_VERSION match {
+      case v if v.startsWith("1.1") =>
+        Seq()
+      case v if v.startsWith("1.0") =>
+        Seq(
+          MimaBuild.excludeSparkPackage("api.java"),
+          MimaBuild.excludeSparkPackage("mllib"),
+          MimaBuild.excludeSparkPackage("streaming")
+        ) ++
+        MimaBuild.excludeSparkClass("rdd.ClassTags") ++
+        MimaBuild.excludeSparkClass("util.XORShiftRandom") ++
+        MimaBuild.excludeSparkClass("graphx.EdgeRDD") ++
+        MimaBuild.excludeSparkClass("graphx.VertexRDD") ++
+        MimaBuild.excludeSparkClass("graphx.impl.GraphImpl") ++
+        MimaBuild.excludeSparkClass("graphx.impl.RoutingTable") ++
+        MimaBuild.excludeSparkClass("graphx.util.collection.PrimitiveKeyOpenHashMap") ++
+        MimaBuild.excludeSparkClass("graphx.util.collection.GraphXPrimitiveKeyOpenHashMap") ++
+        MimaBuild.excludeSparkClass("mllib.recommendation.MFDataGenerator") ++
+        MimaBuild.excludeSparkClass("mllib.optimization.SquaredGradient") ++
+        MimaBuild.excludeSparkClass("mllib.regression.RidgeRegressionWithSGD") ++
+        MimaBuild.excludeSparkClass("mllib.regression.LassoWithSGD") ++
+        MimaBuild.excludeSparkClass("mllib.regression.LinearRegressionWithSGD")
+      case _ => Seq()
+    }
+}
+
diff --git a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
index 011db50b7d5684df4487e0ad8b987408861b419c..6a261e19a35cd9b1e3e975f98292d8b1cdb006fa 100644
--- a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
+++ b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
@@ -105,9 +105,9 @@ object GenerateMIMAIgnore {
   }
 
   def main(args: Array[String]) {
-    scala.tools.nsc.io.File(".mima-excludes").
+    scala.tools.nsc.io.File(".generated-mima-excludes").
       writeAll(classesPrivateWithin("org.apache.spark").mkString("\n"))
-    println("Created : .mima-excludes in current directory.")
+    println("Created : .generated-mima-excludes in current directory.")
   }