Skip to content
Snippets Groups Projects
Commit 509b721d authored by Josh Rosen's avatar Josh Rosen
Browse files

Fix Akka configuration in assembly jar.

This resolves an issue where running Spark from
the assembly jar would cause a "No configuration
setting found for key 'akka.version'" exception.

This solution is from the Akka Team Blog:

http://letitcrash.com/post/21025950392/howto-sbt-assembly-vs-reference-conf
parent 3ee2530c
No related branches found
No related tags found
No related merge requests found
import sbt._
import Keys._
import classpath.ClasspathUtilities.isArchive
import java.io.FileOutputStream
import sbtassembly.Plugin._
import AssemblyKeys._
...@@ -69,12 +71,12 @@ object SparkBuild extends Build {
"cc.spray" % "spray-can" % "1.0-M2.1", "cc.spray" % "spray-can" % "1.0-M2.1",
"cc.spray" % "spray-server" % "1.0-M2.1" "cc.spray" % "spray-server" % "1.0-M2.1"
) )
) ++ assemblySettings ++ extraAssemblySettings ) ++ assemblySettings ++ extraAssemblySettings ++ mergeSettings
def replSettings = sharedSettings ++ Seq( def replSettings = sharedSettings ++ Seq(
name := "spark-repl", name := "spark-repl",
libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _) libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _)
) ++ assemblySettings ++ extraAssemblySettings ) ++ assemblySettings ++ extraAssemblySettings ++ mergeSettings
def examplesSettings = sharedSettings ++ Seq( def examplesSettings = sharedSettings ++ Seq(
name := "spark-examples" name := "spark-examples"
...@@ -82,6 +84,57 @@ object SparkBuild extends Build {
def bagelSettings = sharedSettings ++ Seq(name := "spark-bagel")
// Fix for "No configuration setting found for key 'akka.version'" exception
// when running Spark from the jar generated by the "assembly" task; see
// http://letitcrash.com/post/21025950392/howto-sbt-assembly-vs-reference-conf
lazy val merge = TaskKey[File]("merge-reference",
"merge all reference.conf")
// Settings that make the assembly jar carry a single merged reference.conf.
// Many dependency jars (Akka among them) each ship their own reference.conf;
// without merging, only one survives in the fat jar and config keys such as
// 'akka.version' disappear (the exception this commit fixes).
lazy val mergeSettings: Seq[Project.Setting[_]] = Seq(
merge <<= (fullClasspath in assembly) map {
c =>
// collect from all elements of the full classpath
// (split into jar archives vs. plain class directories)
val (libs, dirs) =
c map (_.data) partition (isArchive)
// goal is to simply concatenate files here
// NOTE(review): output path is relative to sbt's working directory —
// confirm that is the intended location for the merged file.
val dest = file("reference.conf")
val out = new FileOutputStream(dest)
// partially applied IO.transfer: appends one file's bytes to `out`
val append = IO.transfer(_: File, out)
try {
// first collect from managed sources
(dirs * "reference.conf").get foreach append
// then from dependency jars by unzipping and
// collecting reference.conf if present
for (lib <- libs) {
IO withTemporaryDirectory {
dir =>
IO.unzip(lib, dir, "reference.conf")
(dir * "reference.conf").get foreach append
}
}
// return merged file location as task result
dest
} finally {
// always release the stream, even if a copy step throws
out.close()
}
},
// get rid of the individual files from jars
// (they are superseded by the merged copy registered below)
excludedFiles in assembly <<=
(excludedFiles in assembly) {
(old) => (bases) =>
old(bases) ++ (bases flatMap (base =>
(base / "reference.conf").get))
},
// tell sbt-assembly to include our merged file
assembledMappings in assembly <<=
(assembledMappings in assembly, merge) map {
(old, merged) => (f) =>
old(f) :+(merged, "reference.conf")
}
)
def extraAssemblySettings() = Seq(test in assembly := {}) ++ Seq(
mergeStrategy in assembly := {
case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment