diff --git a/dev/run-tests.py b/dev/run-tests.py
index 450b68123e1fc9156e0cf0d7bf625ade1e919ac9..818a0c9f48419f198b4e2fc4978c14397e3011c0 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -365,8 +365,16 @@ def build_spark_assembly_sbt(hadoop_version):
     print("[info] Building Spark assembly (w/Hive 1.2.1) using SBT with these arguments: ",
           " ".join(profiles_and_goals))
     exec_sbt(profiles_and_goals)
-    # Make sure that Java and Scala API documentation can be generated
-    build_spark_unidoc_sbt(hadoop_version)
+
+    # Note that we skip the Unidoc build only if Hadoop 2.6 is explicitly set in this SBT build.
+    # For an unknown reason, dependency resolution in SBT & Unidoc differs from Maven's, and the
+    # documentation build fails on a specific machine & environment in Jenkins that we were
+    # unable to reproduce. Please see SPARK-20343. This is a band-aid fix that should be removed
+    # in the future.
+    is_hadoop_version_2_6 = os.environ.get("AMPLAB_JENKINS_BUILD_PROFILE") == "hadoop2.6"
+    if not is_hadoop_version_2_6:
+        # Make sure that Java and Scala API documentation can be generated
+        build_spark_unidoc_sbt(hadoop_version)
 
 
 def build_apache_spark(build_tool, hadoop_version):
diff --git a/pom.xml b/pom.xml
index 14370d92a90808d004ed5413b2e1a77ae0459398..c1174593c19220d2511df0c92b219f1802d5fb80 100644
--- a/pom.xml
+++ b/pom.xml
@@ -142,7 +142,6 @@
     <ivy.version>2.4.0</ivy.version>
     <oro.version>2.0.8</oro.version>
     <codahale.metrics.version>3.1.2</codahale.metrics.version>
-    <!-- Keep consistent with Avro vesion in SBT build for SPARK-20343 -->
     <avro.version>1.7.7</avro.version>
     <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
     <jets3t.version>0.9.3</jets3t.version>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 77dae289f775818186b21560873eb7b3d8a71802..e52baf51aed1af3a52b8c4bc54f384bf8d486fb8 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -318,8 +318,8 @@ object SparkBuild extends PomBuild {
     enable(MimaBuild.mimaSettings(sparkHome, x))(x)
   }
 
-  /* Generate and pick the spark build info from extra-resources and override a dependency */
-  enable(Core.settings ++ CoreDependencyOverrides.settings)(core)
+  /* Generate and pick the spark build info from extra-resources */
+  enable(Core.settings)(core)
 
   /* Unsafe settings */
   enable(Unsafe.settings)(unsafe)
@@ -443,16 +443,6 @@ object DockerIntegrationTests {
   )
 }
 
-/**
- * Overrides to work around sbt's dependency resolution being different from Maven's in Unidoc.
- *
- * Note that, this is a hack that should be removed in the future. See SPARK-20343
- */
-object CoreDependencyOverrides {
-  lazy val settings = Seq(
-    dependencyOverrides += "org.apache.avro" % "avro" % "1.7.7")
-}
-
 /**
  * Overrides to work around sbt's dependency resolution being different from Maven's.
  */
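
Aside on the removed override, for reviewers unfamiliar with the mechanism: sbt's
dependencyOverrides setting pins a transitively resolved artifact to a fixed version
without adding it as a direct dependency, which is how the deleted
CoreDependencyOverrides object forced Avro to 1.7.7. A minimal standalone sketch of
that mechanism (the project name and the spark-core version below are illustrative
assumptions, not taken from this patch):

    // build.sbt -- minimal sketch of the override mechanism deleted above.
    // Any transitively resolved org.apache.avro:avro is pinned to 1.7.7,
    // matching the <avro.version> property in pom.xml.
    name := "dependency-override-sketch"  // illustrative project name

    // Hypothetical direct dependency that pulls in Avro transitively.
    libraryDependencies += "org.apache.spark" %% "spark-core" % "2.2.0"

    dependencyOverrides += "org.apache.avro" % "avro" % "1.7.7"

Unlike libraryDependencies, an entry in dependencyOverrides only takes effect when the
artifact already appears somewhere in the dependency graph, which is why removing it
here changes version resolution without changing the set of dependencies.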