diff --git a/build/spark-build-info b/build/spark-build-info
new file mode 100755
index 0000000000000000000000000000000000000000..ad0ec67f455cb7ee344afc5e71f99abc2317c0ab
--- /dev/null
+++ b/build/spark-build-info
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This script generates the build info for spark and places it into the spark-version-info.properties file.
+# Arguments:
+#   build_tgt_directory - The target directory where properties file would be created. [./core/target/extra-resources]
+#   spark_version - The current version of spark
+
+RESOURCE_DIR="$1"
+mkdir -p "$RESOURCE_DIR"
+SPARK_BUILD_INFO="${RESOURCE_DIR}"/spark-version-info.properties
+
+echo_build_properties() { # print key=value pairs; quote expansions so whitespace/globs can't mangle values
+  echo version="$1"
+  echo user="$USER"
+  echo revision="$(git rev-parse HEAD)"
+  echo branch="$(git rev-parse --abbrev-ref HEAD)"
+  echo date="$(date -u +%Y-%m-%dT%H:%M:%SZ)"
+  echo url="$(git config --get remote.origin.url)"
+}
+
+echo_build_properties "$2" > "$SPARK_BUILD_INFO"
diff --git a/core/pom.xml b/core/pom.xml
index 45f8bfcc05aa1dcbefedfe6d1630975089f5e590..f5fdb40696017b166b5ac56345766ced4b11e7be 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -337,7 +337,38 @@
   <build>
     <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
     <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
+    <resources>
+      <resource>
+        <directory>${project.basedir}/src/main/resources</directory>
+      </resource>
+      <resource>
+        <!-- Include the properties file to provide the build information. -->
+        <directory>${project.build.directory}/extra-resources</directory>
+        <filtering>true</filtering>
+      </resource>
+    </resources>
     <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <phase>generate-resources</phase>
+            <configuration>
+              <!-- Execute the shell script to generate the spark build information. -->
+              <target>
+                <exec executable="${project.basedir}/../build/spark-build-info">
+                  <arg value="${project.build.directory}/extra-resources"/>
+                  <arg value="${project.version}"/>
+                </exec>
+              </target>
+            </configuration>
+            <goals>
+              <goal>run</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-dependency-plugin</artifactId>
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 9be4cadcb430aaa604ef9bb69d11bb676b081ff3..9feafc99ac07fc592b042a12a113167f10d3e3f0 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -40,7 +40,8 @@ import org.apache.ivy.plugins.matcher.GlobPatternMatcher
 import org.apache.ivy.plugins.repository.file.FileRepository
 import org.apache.ivy.plugins.resolver.{ChainResolver, FileSystemResolver, IBiblioResolver}
 
-import org.apache.spark.{SPARK_VERSION, SparkException, SparkUserAppException}
+import org.apache.spark.{SPARK_REVISION, SPARK_VERSION, SparkException, SparkUserAppException}
+import org.apache.spark.{SPARK_BRANCH, SPARK_BUILD_DATE, SPARK_BUILD_USER, SPARK_REPO_URL}
 import org.apache.spark.api.r.RUtils
 import org.apache.spark.deploy.rest._
 import org.apache.spark.launcher.SparkLauncher
@@ -103,6 +104,10 @@ object SparkSubmit {
    /___/ .__/\_,_/_/ /_/\_\   version %s
       /_/
                         """.format(SPARK_VERSION))
+    printStream.println("Branch %s".format(SPARK_BRANCH))
+    printStream.println("Compiled by user %s on %s".format(SPARK_BUILD_USER, SPARK_BUILD_DATE))
+    printStream.println("Revision %s".format(SPARK_REVISION))
+    printStream.println("Url %s".format(SPARK_REPO_URL))
     printStream.println("Type --help for more information.")
     exitFn(0)
   }
diff --git a/core/src/main/scala/org/apache/spark/package.scala b/core/src/main/scala/org/apache/spark/package.scala
index cc5e7ef3ae0085ccc354a5e1198df516ef6e69ad..2610d6f6e45a27c74cf03a319cf899dc1776a0ea 100644
--- a/core/src/main/scala/org/apache/spark/package.scala
+++ b/core/src/main/scala/org/apache/spark/package.scala
@@ -41,7 +41,58 @@ package org.apache
  * level interfaces. These are subject to changes or removal in minor releases.
  */
 
+import java.util.Properties
+
 package object spark {
-  // For package docs only
-  val SPARK_VERSION = "2.0.0-SNAPSHOT"
+
+  private object SparkBuildInfo {
+
+    val (
+        spark_version: String,
+        spark_branch: String,
+        spark_revision: String,
+        spark_build_user: String,
+        spark_repo_url: String,
+        spark_build_date: String) = {
+
+      // Locate the properties file generated at build time; null means it is missing.
+      val resourceStream = Thread.currentThread().getContextClassLoader.
+        getResourceAsStream("spark-version-info.properties")
+      if (resourceStream == null) {
+        throw new SparkException("Could not find spark-version-info.properties")
+      }
+
+      try {
+        val unknownProp = "<unknown>"
+        val props = new Properties()
+        props.load(resourceStream)
+        (
+          props.getProperty("version", unknownProp),
+          props.getProperty("branch", unknownProp),
+          props.getProperty("revision", unknownProp),
+          props.getProperty("user", unknownProp),
+          props.getProperty("url", unknownProp),
+          props.getProperty("date", unknownProp)
+        )
+      } catch {
+        case e: Exception =>
+          throw new SparkException("Error loading properties from spark-version-info.properties", e)
+      } finally {
+        // Best-effort close: throwing here would mask the original exception
+        // (or discard the successfully loaded values), so swallow close errors.
+        try {
+          resourceStream.close()
+        } catch {
+          case _: Exception => // ignore
+        }
+      }
+    }
+  }
+
+  val SPARK_VERSION = SparkBuildInfo.spark_version
+  val SPARK_BRANCH = SparkBuildInfo.spark_branch
+  val SPARK_REVISION = SparkBuildInfo.spark_revision
+  val SPARK_BUILD_USER = SparkBuildInfo.spark_build_user
+  val SPARK_REPO_URL = SparkBuildInfo.spark_repo_url
+  val SPARK_BUILD_DATE = SparkBuildInfo.spark_build_date
 }
+
diff --git a/pom.xml b/pom.xml
index 60c8c8dc7a727ecb1eafeacc095c3cd0a12acb13..6c67452adf86777b0ef85b5805a2edf1db80e993 100644
--- a/pom.xml
+++ b/pom.xml
@@ -180,6 +180,8 @@
     <antlr4.version>4.5.3</antlr4.version>
     <jpam.version>1.1</jpam.version>
     <selenium.version>2.52.0</selenium.version>
+    <paranamer.version>2.8</paranamer.version>
+    <maven-antrun.version>1.8</maven-antrun.version>
 
     <test.java.home>${java.home}</test.java.home>
     <test.exclude.tags></test.exclude.tags>
@@ -2061,7 +2063,7 @@
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-antrun-plugin</artifactId>
-          <version>1.8</version>
+          <version>${maven-antrun.version}</version>
         </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
@@ -2184,7 +2186,7 @@
                   <pluginExecutionFilter>
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-antrun-plugin</artifactId>
-                    <versionRange>[1.8,)</versionRange>
+                    <versionRange>[${maven-antrun.version},)</versionRange>
                     <goals>
                       <goal>run</goal>
                     </goals>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 744f57c5177a35cc2c5bdf0f5e48e49e972be4f7..304288a32cefee1d8de44d4ef7f5512b4710ebd8 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -360,6 +360,9 @@ object SparkBuild extends PomBuild {
     enable(MimaBuild.mimaSettings(sparkHome, x))(x)
   }
 
+  /* Generate and pick the spark build info from extra-resources */
+  enable(Core.settings)(core)
+
   /* Unsafe settings */
   enable(Unsafe.settings)(unsafe)
 
@@ -448,7 +451,19 @@ object SparkBuild extends PomBuild {
       else x.settings(Seq[Setting[_]](): _*)
     } ++ Seq[Project](OldDeps.project)
   }
+}
 
+object Core {
+  lazy val settings = Seq(
+    resourceGenerators in Compile += Def.task {
+      val buildScript = baseDirectory.value + "/../build/spark-build-info"
+      val targetDir = baseDirectory.value + "/target/extra-resources/"
+      // Pass arguments as a Seq so paths containing spaces are not word-split.
+      Process(Seq(buildScript, targetDir, version.value)).!!
+      val propsFile = baseDirectory.value / "target" / "extra-resources" / "spark-version-info.properties"
+      Seq(propsFile)
+    }.taskValue
+  )
+}
 
 object Unsafe {
@@ -480,9 +495,9 @@ object DependencyOverrides {
 }
 
 /**
-  This excludes library dependencies in sbt, which are specified in maven but are
-  not needed by sbt build.
-  */
+ * This excludes library dependencies in sbt, which are specified in maven but are
+ * not needed by sbt build.
+ */
 object ExcludedDependencies {
   lazy val settings = Seq(
     libraryDependencies ~= { libs => libs.filterNot(_.name == "groovy-all") }