From 3004074152b7261c2a968bb8e94ec7c41a7b43c1 Mon Sep 17 00:00:00 2001
From: Tathagata Das <tathagata.das1565@gmail.com>
Date: Fri, 22 Aug 2014 21:34:48 -0700
Subject: [PATCH] [SPARK-3169] Removed dependency on spark streaming test from
 spark flume sink

Due to Maven bug https://jira.codehaus.org/browse/MNG-1378, Maven could not resolve the Spark Streaming classes required by the spark-streaming test-jar dependency of external/flume-sink. There is no particular reason for external/flume-sink to depend on Spark Streaming at all, so I am eliminating this dependency. I have also removed the exclusions present in the Flume dependencies, as there is no reason to exclude them (they were excluded in the external/flume module to prevent dependency collisions with Spark).

Since Jenkins will test the sbt build and run the unit tests, I only tested Maven compilation locally.
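
As a rough illustration (not part of the original patch), the local Maven compile check mentioned above could look something like the following, run from the repository root; -pl selects the flume-sink module and -am also builds the in-project modules it depends on:

    mvn -pl external/flume-sink -am -DskipTests compile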

Author: Tathagata Das <tathagata.das1565@gmail.com>

Closes #2101 from tdas/spark-sink-pom-fix and squashes the following commits:

8f42621 [Tathagata Das] Added Flume sink exclusions back, and added netty to test dependencies
93b559f [Tathagata Das] Removed dependency on spark streaming test from spark flume sink
---
 external/flume-sink/pom.xml                    | 18 ++++++++++++------
 .../streaming/flume/sink/SparkSinkSuite.scala  | 10 +++++-----
 2 files changed, 17 insertions(+), 11 deletions(-)

diff --git a/external/flume-sink/pom.xml b/external/flume-sink/pom.xml
index 0c68defa5e..19192e40a7 100644
--- a/external/flume-sink/pom.xml
+++ b/external/flume-sink/pom.xml
@@ -62,7 +62,7 @@
           <groupId>org.apache.thrift</groupId>
           <artifactId>libthrift</artifactId>
         </exclusion>
-      </exclusions>
+    </exclusions>
     </dependency>
     <dependency>
       <groupId>org.scala-lang</groupId>
@@ -71,13 +71,19 @@
     <dependency>
       <groupId>org.scalatest</groupId>
       <artifactId>scalatest_${scala.binary.version}</artifactId>
+      <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-streaming_${scala.binary.version}</artifactId>
-      <version>${project.version}</version>
-      <type>test-jar</type>
-      <scope>test</scope> <!-- Need it only for tests, don't package it -->
+      <!--
+        Netty is explicitly added as a test dependency because it is
+        excluded from the Flume dependency (to avoid runtime conflicts
+        when running with Spark), but the unit tests need it. Flume
+        1.4.0 depends on Netty 3.4.0.Final.
+      -->
+      <groupId>io.netty</groupId>
+      <artifactId>netty</artifactId>
+      <version>3.4.0.Final</version>
+      <scope>test</scope>
     </dependency>
   </dependencies>
   <build>
diff --git a/external/flume-sink/src/test/scala/org/apache/spark/streaming/flume/sink/SparkSinkSuite.scala b/external/flume-sink/src/test/scala/org/apache/spark/streaming/flume/sink/SparkSinkSuite.scala
index 44b27edf85..75a6668c62 100644
--- a/external/flume-sink/src/test/scala/org/apache/spark/streaming/flume/sink/SparkSinkSuite.scala
+++ b/external/flume-sink/src/test/scala/org/apache/spark/streaming/flume/sink/SparkSinkSuite.scala
@@ -30,14 +30,14 @@ import org.apache.avro.ipc.specific.SpecificRequestor
 import org.apache.flume.Context
 import org.apache.flume.channel.MemoryChannel
 import org.apache.flume.event.EventBuilder
-import org.apache.spark.streaming.TestSuiteBase
 import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory
+import org.scalatest.FunSuite
 
-class SparkSinkSuite extends TestSuiteBase {
+class SparkSinkSuite extends FunSuite {
   val eventsPerBatch = 1000
   val channelCapacity = 5000
 
-  test("Success") {
+  test("Success with ack") {
     val (channel, sink) = initializeChannelAndSink()
     channel.start()
     sink.start()
@@ -57,7 +57,7 @@ class SparkSinkSuite extends TestSuiteBase {
     transceiver.close()
   }
 
-  test("Nack") {
+  test("Failure with nack") {
     val (channel, sink) = initializeChannelAndSink()
     channel.start()
     sink.start()
@@ -76,7 +76,7 @@ class SparkSinkSuite extends TestSuiteBase {
     transceiver.close()
   }
 
-  test("Timeout") {
+  test("Failure with timeout") {
     val (channel, sink) = initializeChannelAndSink(Map(SparkSinkConfig
       .CONF_TRANSACTION_TIMEOUT -> 1.toString))
     channel.start()
-- 
GitLab