diff --git a/build/mvn b/build/mvn
index c3ab62da368685519aa70cc458c51c81d322319f..866bad892c752bcea08de7a4a18b4e90674c272e 100755
--- a/build/mvn
+++ b/build/mvn
@@ -91,13 +91,13 @@ install_mvn() {
 
 # Install zinc under the build/ folder
 install_zinc() {
-  local zinc_path="zinc-0.3.9/bin/zinc"
+  local zinc_path="zinc-0.3.11/bin/zinc"
   [ ! -f "${_DIR}/${zinc_path}" ] && ZINC_INSTALL_FLAG=1
   local TYPESAFE_MIRROR=${TYPESAFE_MIRROR:-https://downloads.typesafe.com}
 
   install_app \
-    "${TYPESAFE_MIRROR}/zinc/0.3.9" \
-    "zinc-0.3.9.tgz" \
+    "${TYPESAFE_MIRROR}/zinc/0.3.11" \
+    "zinc-0.3.11.tgz" \
     "${zinc_path}"
   ZINC_BIN="${_DIR}/${zinc_path}"
 }
diff --git a/pom.xml b/pom.xml
index 2e67364d3ebe1d394016c126c359955bf1ccb557..2f61d33b470c78269433d4cae4655d7e0349f7f2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1909,7 +1909,7 @@
         <plugin>
           <groupId>org.codehaus.mojo</groupId>
           <artifactId>build-helper-maven-plugin</artifactId>
-          <version>1.10</version>
+          <version>1.12</version>
         </plugin>
         <plugin>
           <groupId>net.alchim31.maven</groupId>
@@ -1972,7 +1972,7 @@
         <plugin>
          <groupId>org.apache.maven.plugins</groupId>
          <artifactId>maven-compiler-plugin</artifactId>
-          <version>3.5.1</version>
+          <version>3.6.0</version>
          <configuration>
            <source>${java.version}</source>
            <target>${java.version}</target>
@@ -2092,7 +2092,7 @@
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-jar-plugin</artifactId>
-          <version>2.6</version>
+          <version>3.0.2</version>
         </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
@@ -2102,7 +2102,7 @@
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-source-plugin</artifactId>
-          <version>2.4</version>
+          <version>3.0.1</version>
           <configuration>
             <attach>true</attach>
           </configuration>
@@ -2137,17 +2137,17 @@
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-javadoc-plugin</artifactId>
-          <version>2.10.3</version>
+          <version>2.10.4</version>
         </plugin>
         <plugin>
           <groupId>org.codehaus.mojo</groupId>
           <artifactId>exec-maven-plugin</artifactId>
-          <version>1.4.0</version>
+          <version>1.5.0</version>
         </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-assembly-plugin</artifactId>
-          <version>2.6</version>
+          <version>3.0.0</version>
         </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
diff --git a/project/MimaBuild.scala b/project/MimaBuild.scala
index 77397eab81ede129a07a4142dc6d3f156c46332c..de0655b6cb357ec0c5e3962fa8dafc0d45df7ace 100644
--- a/project/MimaBuild.scala
+++ b/project/MimaBuild.scala
@@ -22,7 +22,7 @@
 import com.typesafe.tools.mima.core._
 import com.typesafe.tools.mima.core.MissingClassProblem
 import com.typesafe.tools.mima.core.MissingTypesProblem
 import com.typesafe.tools.mima.core.ProblemFilters._
-import com.typesafe.tools.mima.plugin.MimaKeys.{binaryIssueFilters, previousArtifact}
+import com.typesafe.tools.mima.plugin.MimaKeys.{mimaBinaryIssueFilters, mimaPreviousArtifacts}
 import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings
 
@@ -92,8 +92,8 @@ object MimaBuild {
 
     val project = projectRef.project
     val fullId = "spark-" + project + "_2.11"
     mimaDefaultSettings ++
-      Seq(previousArtifact := Some(organization % fullId % previousSparkVersion),
-        binaryIssueFilters ++= ignoredABIProblems(sparkHome, version.value))
+      Seq(mimaPreviousArtifacts := Set(organization % fullId % previousSparkVersion),
+        mimaBinaryIssueFilters ++= ignoredABIProblems(sparkHome, version.value))
   }
 }
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index e3fbe0379fb7b6c7635d41c3e3420682d8150a8d..fdc33c77fe292d1bba65819f746592010de9c738 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -251,13 +251,12 @@
       Resolver.file("local", file(Path.userHome.absolutePath + "/.ivy2/local"))(Resolver.ivyStylePatterns)
     ),
     externalResolvers := resolvers.value,
-    otherResolvers <<= SbtPomKeys.mvnLocalRepository(dotM2 => Seq(Resolver.file("dotM2", dotM2))),
-    publishLocalConfiguration in MavenCompile <<= (packagedArtifacts, deliverLocal, ivyLoggingLevel) map {
-      (arts, _, level) => new PublishConfiguration(None, "dotM2", arts, Seq(), level)
-    },
+    otherResolvers := SbtPomKeys.mvnLocalRepository(dotM2 => Seq(Resolver.file("dotM2", dotM2))).value,
+    publishLocalConfiguration in MavenCompile :=
+      new PublishConfiguration(None, "dotM2", packagedArtifacts.value, Seq(), ivyLoggingLevel.value),
     publishMavenStyle in MavenCompile := true,
-    publishLocal in MavenCompile <<= publishTask(publishLocalConfiguration in MavenCompile, deliverLocal),
-    publishLocalBoth <<= Seq(publishLocal in MavenCompile, publishLocal).dependOn,
+    publishLocal in MavenCompile := publishTask(publishLocalConfiguration in MavenCompile, deliverLocal).value,
+    publishLocalBoth := Seq(publishLocal in MavenCompile, publishLocal).dependOn.value,
 
     javacOptions in (Compile, doc) ++= {
       val versionParts = System.getProperty("java.version").split("[+.\\-]+", 3)
@@ -431,7 +430,8 @@
       val packages :: className :: otherArgs = spaceDelimited("<group:artifact:version> <MainClass> [args]").parsed.toList
       val scalaRun = (runner in run).value
       val classpath = (fullClasspath in Runtime).value
-      val args = Seq("--packages", packages, "--class", className, (Keys.`package` in Compile in "core").value.getCanonicalPath) ++ otherArgs
+      val args = Seq("--packages", packages, "--class", className, (Keys.`package` in Compile in LocalProject("core"))
+        .value.getCanonicalPath) ++ otherArgs
       println(args)
       scalaRun.run("org.apache.spark.deploy.SparkSubmit", classpath.map(_.data), args, streams.value.log)
     },
@@ -443,7 +443,7 @@
     }
   ))(assembly)
 
-  enable(Seq(sparkShell := sparkShell in "assembly"))(spark)
+  enable(Seq(sparkShell := sparkShell in LocalProject("assembly")))(spark)
 
   // TODO: move this to its upstream project.
   override def projectDefinitions(baseDirectory: File): Seq[Project] = {
@@ -512,9 +512,9 @@ object OldDeps {
 
   lazy val project = Project("oldDeps", file("dev"), settings = oldDepsSettings)
 
-  lazy val allPreviousArtifactKeys = Def.settingDyn[Seq[Option[ModuleID]]] {
+  lazy val allPreviousArtifactKeys = Def.settingDyn[Seq[Set[ModuleID]]] {
     SparkBuild.mimaProjects
-      .map { project => MimaKeys.previousArtifact in project }
+      .map { project => MimaKeys.mimaPreviousArtifacts in project }
       .map(k => Def.setting(k.value))
       .join
   }
@@ -568,9 +568,9 @@ object Hive {
     javaOptions in Test := (javaOptions in Test).value.filterNot(_ == "-ea"),
     // Supporting all SerDes requires us to depend on deprecated APIs, so we turn off the warnings
    // only for this subproject.
-    scalacOptions <<= scalacOptions map { currentOpts: Seq[String] =>
+    scalacOptions := (scalacOptions map { currentOpts: Seq[String] =>
       currentOpts.filterNot(_ == "-deprecation")
-    },
+    }).value,
     initialCommands in console :=
       """
        |import org.apache.spark.SparkContext
@@ -608,17 +608,18 @@
       sys.props.get("hadoop.version")
         .getOrElse(SbtPomKeys.effectivePom.value.getProperties.get("hadoop.version").asInstanceOf[String])
     },
-    jarName in assembly <<= (version, moduleName, hadoopVersion) map { (v, mName, hv) =>
-      if (mName.contains("streaming-flume-assembly") || mName.contains("streaming-kafka-0-8-assembly") || mName.contains("streaming-kafka-0-10-assembly") || mName.contains("streaming-kinesis-asl-assembly")) {
+    jarName in assembly := {
+      if (moduleName.value.contains("streaming-flume-assembly")
+        || moduleName.value.contains("streaming-kafka-0-8-assembly")
+        || moduleName.value.contains("streaming-kafka-0-10-assembly")
+        || moduleName.value.contains("streaming-kinesis-asl-assembly")) {
         // This must match the same name used in maven (see external/kafka-0-8-assembly/pom.xml)
-        s"${mName}-${v}.jar"
+        s"${moduleName.value}-${version.value}.jar"
       } else {
-        s"${mName}-${v}-hadoop${hv}.jar"
+        s"${moduleName.value}-${version.value}-hadoop${hadoopVersion.value}.jar"
       }
     },
-    jarName in (Test, assembly) <<= (version, moduleName, hadoopVersion) map { (v, mName, hv) =>
-      s"${mName}-test-${v}.jar"
-    },
+    jarName in (Test, assembly) := s"${moduleName.value}-test-${version.value}.jar",
     mergeStrategy in assembly := {
       case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard
       case m if m.toLowerCase.matches("meta-inf.*\\.sf$") => MergeStrategy.discard
@@ -639,13 +640,13 @@
     // Use a resource generator to copy all .py files from python/pyspark into a managed directory
     // to be included in the assembly. We can't just add "python/" to the assembly's resource dir
     // list since that will copy unneeded / unwanted files.
-    resourceGenerators in Compile <+= resourceManaged in Compile map { outDir: File =>
+    resourceGenerators in Compile += Def.macroValueI(resourceManaged in Compile map { outDir: File =>
       val src = new File(BuildCommons.sparkHome, "python/pyspark")
       val zipFile = new File(BuildCommons.sparkHome , "python/lib/pyspark.zip")
       zipFile.delete()
       zipRecursive(src, zipFile)
       Seq[File]()
-    }
+    }).value
   )
 
   private def zipRecursive(source: File, destZipFile: File) = {
@@ -771,7 +772,7 @@ object Unidoc {
 object CopyDependencies {
 
   val copyDeps = TaskKey[Unit]("copyDeps", "Copies needed dependencies to the build directory.")
-  val destPath = (crossTarget in Compile) / "jars"
+  val destPath = (crossTarget in Compile) { _ / "jars"}
 
   lazy val settings = Seq(
     copyDeps := {
@@ -791,7 +792,7 @@
       }
     },
     crossTarget in (Compile, packageBin) := destPath.value,
-    packageBin in Compile <<= (packageBin in Compile).dependsOn(copyDeps)
+    packageBin in Compile := (packageBin in Compile).dependsOn(copyDeps).value
   )
 }
@@ -862,7 +863,7 @@
     // Only allow one test at a time, even across projects, since they run in the same JVM
     parallelExecution in Test := false,
     // Make sure the test temp directory exists.
-    resourceGenerators in Test <+= resourceManaged in Test map { outDir: File =>
+    resourceGenerators in Test += Def.macroValueI(resourceManaged in Test map { outDir: File =>
      var dir = new File(testTempDir)
      if (!dir.isDirectory()) {
        // Because File.mkdirs() can fail if multiple callers are trying to create the same
@@ -880,7 +881,7 @@
        }
      }
      Seq[File]()
-    },
+    }).value,
     concurrentRestrictions in Global += Tags.limit(Tags.Test, 1),
     // Remove certain packages from Scaladoc
     scalacOptions in (Compile, doc) := Seq(
diff --git a/project/build.properties b/project/build.properties
index 1e38156e0b5772ceee314f6ef5acd9581168a38d..d339865ab915a1955ad683810bc4161ddfe6ce96 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -14,4 +14,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-sbt.version=0.13.11
+sbt.version=0.13.13
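--
Note on the sbt syntax migration above: sbt 0.13.13 deprecates the <<= and <+=
operators, which is why this patch rewrites those settings with := / += and the
.value macro, and why MiMa's previousArtifact (an Option[ModuleID]) becomes
mimaPreviousArtifacts (a Set[ModuleID]). A minimal before/after sketch in sbt
0.13.x syntax, using a hypothetical jarLabel key that is not part of the Spark
build:

  // build.sbt -- jarLabel is illustrative only, not a real Spark key
  val jarLabel = taskKey[String]("example derived artifact label")

  // Old style, deprecated since sbt 0.13.13: applicative wiring via <<=
  //   jarLabel <<= (moduleName, version) map { (m, v) => s"$m-$v.jar" }

  // New style, as used throughout this patch: := with the .value macro
  jarLabel := s"${moduleName.value}-${version.value}.jar"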