diff --git a/sql/hive/src/test/resources/regression-test-SPARK-8489/Main.scala b/sql/hive/src/test/resources/regression-test-SPARK-8489/Main.scala
index 10a017df831e0bc234089aaf0186a6a28091a473..4fbbbacb76081c477d922d6b3fd7e4acce755de3 100644
--- a/sql/hive/src/test/resources/regression-test-SPARK-8489/Main.scala
+++ b/sql/hive/src/test/resources/regression-test-SPARK-8489/Main.scala
@@ -15,7 +15,6 @@
  * limitations under the License.
  */
 
-import org.apache.spark.SparkContext
 import org.apache.spark.sql.SparkSession
 
 /**
@@ -33,15 +32,18 @@ object Main {
   def main(args: Array[String]) {
     // scalastyle:off println
    println("Running regression test for SPARK-8489.")
-    val sc = new SparkContext("local", "testing")
-    val sparkSession = SparkSession.withHiveSupport(sc)
+    val spark = SparkSession.builder
+      .master("local")
+      .appName("testing")
+      .enableHiveSupport()
+      .getOrCreate()
     // This line should not throw scala.reflect.internal.MissingRequirementError.
     // See SPARK-8470 for more detail.
-    val df = sparkSession.createDataFrame(Seq(MyCoolClass("1", "2", "3")))
+    val df = spark.createDataFrame(Seq(MyCoolClass("1", "2", "3")))
     df.collect()
     println("Regression test for SPARK-8489 success!")
     // scalastyle:on println
-    sc.stop()
+    spark.stop()
   }
 }
 
diff --git a/sql/hive/src/test/resources/regression-test-SPARK-8489/test-2.10.jar b/sql/hive/src/test/resources/regression-test-SPARK-8489/test-2.10.jar
index 26d410f33029bb2a9d2b65d9f6985aa159e924a8..3f28d37b93150ebdeec4c6d803351f8c9e1f6cf2 100644
Binary files a/sql/hive/src/test/resources/regression-test-SPARK-8489/test-2.10.jar and b/sql/hive/src/test/resources/regression-test-SPARK-8489/test-2.10.jar differ
diff --git a/sql/hive/src/test/resources/regression-test-SPARK-8489/test-2.11.jar b/sql/hive/src/test/resources/regression-test-SPARK-8489/test-2.11.jar
index f34784752f69f71ce56736d3493cfd55bdc32b4a..5e093697e219a249e6f6dedb1ab9f567c58a2fa9 100644
Binary files a/sql/hive/src/test/resources/regression-test-SPARK-8489/test-2.11.jar and b/sql/hive/src/test/resources/regression-test-SPARK-8489/test-2.11.jar differ
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index a3200117994944a66fd138836da5a72f26f60c72..a717a9978e3c14d2643aafde2d87107d57cc4bcf 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -142,8 +142,7 @@ class HiveSparkSubmitSuite
     runSparkSubmit(args)
   }
 
-  // TODO: re-enable this after rebuilding the jar (HiveContext was removed)
-  ignore("SPARK-8489: MissingRequirementError during reflection") {
+  test("SPARK-8489: MissingRequirementError during reflection") {
     // This test uses a pre-built jar to test SPARK-8489. In a nutshell, this test creates
     // a HiveContext and uses it to create a data frame from an RDD using reflection.
     // Before the fix in SPARK-8470, this results in a MissingRequirementError because