Commit 448aef67 authored by Tathagata Das

Moved DStream, DStreamCheckpointData and PairDStream from org.apache.spark.streaming to org.apache.spark.streaming.dstream.
parent c5921e5c
Showing changed files with 40 additions and 32 deletions
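For downstream code, the effect of this commit is purely an import-path change. Below is a minimal sketch of a streaming program compiled against the new layout; the app name, socket source, and port are illustrative, not part of this commit:

```scala
import org.apache.spark.streaming.{Seconds, StreamingContext}
// Before this commit the class lived at org.apache.spark.streaming.DStream;
// after it, DStream resolves from the dstream subpackage:
import org.apache.spark.streaming.dstream.DStream

object ImportExample {
  def main(args: Array[String]) {
    val ssc = new StreamingContext("local[2]", "ImportExample", Seconds(1))
    // The explicit type annotation is what exercises the new import path.
    val lines: DStream[String] = ssc.socketTextStream("localhost", 9999)
    lines.print()
    ssc.start()
    ssc.awaitTermination()
  }
}
```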
@@ -167,7 +167,7 @@ Spark Streaming features windowed computations, which allow you to apply transfo
 </tr>
 </table>
-A complete list of DStream operations is available in the API documentation of [DStream](api/streaming/index.html#org.apache.spark.streaming.DStream) and [PairDStreamFunctions](api/streaming/index.html#org.apache.spark.streaming.PairDStreamFunctions).
+A complete list of DStream operations is available in the API documentation of [DStream](api/streaming/index.html#org.apache.spark.streaming.dstream.DStream) and [PairDStreamFunctions](api/streaming/index.html#org.apache.spark.streaming.dstream.PairDStreamFunctions).
 ## Output Operations
 When an output operator is called, it triggers the computation of a stream. Currently the following output operators are defined:
......
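The guide text above notes that output operators are what trigger a stream's computation; transformations alone are lazy. A hedged sketch of that behavior (the socket source and port are illustrative, and `foreachRDD` assumes a Spark version where it exists; older releases call it `foreach`):

```scala
import org.apache.spark.streaming.{Seconds, StreamingContext}

val ssc = new StreamingContext("local[2]", "OutputOpsExample", Seconds(1))
val lines = ssc.socketTextStream("localhost", 9999)
val upper = lines.map(_.toUpperCase) // lazy: nothing is computed yet

// Registering an output operator is what schedules the computation:
upper.print()
upper.foreachRDD(rdd => println("batch size: " + rdd.count()))

ssc.start()
```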
@@ -18,8 +18,9 @@
 package org.apache.spark.streaming.flume
 import org.apache.spark.storage.StorageLevel
-import org.apache.spark.streaming.{StreamingContext, DStream}
+import org.apache.spark.streaming.{StreamingContext}
 import org.apache.spark.streaming.api.java.{JavaStreamingContext, JavaDStream}
+import org.apache.spark.streaming.dstream.DStream
 object FlumeUtils {
 /**
......
@@ -26,8 +26,9 @@ import java.util.{Map => JMap}
 import kafka.serializer.{Decoder, StringDecoder}
 import org.apache.spark.storage.StorageLevel
-import org.apache.spark.streaming.{StreamingContext, DStream}
+import org.apache.spark.streaming.{StreamingContext}
 import org.apache.spark.streaming.api.java.{JavaStreamingContext, JavaPairDStream}
+import org.apache.spark.streaming.dstream.DStream
 object KafkaUtils {
......
@@ -18,9 +18,10 @@
 package org.apache.spark.streaming.mqtt
 import org.apache.spark.storage.StorageLevel
-import org.apache.spark.streaming.{StreamingContext, DStream}
+import org.apache.spark.streaming.{StreamingContext}
 import org.apache.spark.streaming.api.java.{JavaStreamingContext, JavaDStream}
 import scala.reflect.ClassTag
+import org.apache.spark.streaming.dstream.DStream
 object MQTTUtils {
 /**
......
@@ -20,8 +20,9 @@ package org.apache.spark.streaming.twitter
 import twitter4j.Status
 import twitter4j.auth.Authorization
 import org.apache.spark.storage.StorageLevel
-import org.apache.spark.streaming.{StreamingContext, DStream}
+import org.apache.spark.streaming.{StreamingContext}
 import org.apache.spark.streaming.api.java.{JavaDStream, JavaStreamingContext}
+import org.apache.spark.streaming.dstream.DStream
 object TwitterUtils {
 /**
......
@@ -25,8 +25,9 @@ import akka.zeromq.Subscribe
 import org.apache.spark.api.java.function.{Function => JFunction}
 import org.apache.spark.storage.StorageLevel
 import org.apache.spark.streaming.receivers.ReceiverSupervisorStrategy
-import org.apache.spark.streaming.{StreamingContext, DStream}
+import org.apache.spark.streaming.{StreamingContext}
 import org.apache.spark.streaming.api.java.{JavaStreamingContext, JavaDStream}
+import org.apache.spark.streaming.dstream.DStream
 object ZeroMQUtils {
 /**
......
@@ -17,11 +17,11 @@
 package org.apache.spark.streaming
-import org.apache.spark.streaming.dstream.{NetworkInputDStream, InputDStream}
+import scala.collection.mutable.ArrayBuffer
 import java.io.{ObjectInputStream, IOException, ObjectOutputStream}
-import collection.mutable.ArrayBuffer
 import org.apache.spark.Logging
 import org.apache.spark.streaming.scheduler.Job
+import org.apache.spark.streaming.dstream.{DStream, NetworkInputDStream, InputDStream}
 final private[streaming] class DStreamGraph extends Serializable with Logging {
......
@@ -17,13 +17,14 @@
 package org.apache.spark.streaming.api.java
-import org.apache.spark.streaming.{Duration, Time, DStream}
+import org.apache.spark.streaming.{Duration, Time}
 import org.apache.spark.api.java.function.{Function => JFunction}
 import org.apache.spark.api.java.JavaRDD
 import org.apache.spark.storage.StorageLevel
 import org.apache.spark.rdd.RDD
 import scala.reflect.ClassTag
+import org.apache.spark.streaming.dstream.DStream
 /**
 * A Discretized Stream (DStream), the basic abstraction in Spark Streaming, is a continuous
......
@@ -30,6 +30,7 @@ import org.apache.spark.api.java.function.{Function3 => JFunction3, _}
 import java.util
 import org.apache.spark.rdd.RDD
 import JavaDStream._
+import org.apache.spark.streaming.dstream.DStream
 trait JavaDStreamLike[T, This <: JavaDStreamLike[T, This, R], R <: JavaRDDLike[T, R]]
 extends Serializable {
......
@@ -35,6 +35,7 @@ import org.apache.spark.storage.StorageLevel
 import com.google.common.base.Optional
 import org.apache.spark.rdd.RDD
 import org.apache.spark.rdd.PairRDDFunctions
+import org.apache.spark.streaming.dstream.DStream
 class JavaPairDStream[K, V](val dstream: DStream[(K, V)])(
 implicit val kManifest: ClassTag[K],
......
@@ -36,6 +36,7 @@ import org.apache.spark.storage.StorageLevel
 import org.apache.spark.streaming._
 import org.apache.spark.streaming.scheduler.StreamingListener
 import org.apache.hadoop.conf.Configuration
+import org.apache.spark.streaming.dstream.DStream
 /**
 * A StreamingContext is the main entry point for Spark Streaming functionality. Besides the basic
......
@@ -15,21 +15,22 @@
 * limitations under the License.
 */
-package org.apache.spark.streaming
-import StreamingContext._
-import org.apache.spark.streaming.dstream._
-import org.apache.spark.streaming.scheduler.Job
-import org.apache.spark.Logging
-import org.apache.spark.rdd.RDD
-import org.apache.spark.storage.StorageLevel
-import org.apache.spark.util.MetadataCleaner
+package org.apache.spark.streaming.dstream
 import scala.collection.mutable.HashMap
 import scala.reflect.ClassTag
 import java.io.{ObjectInputStream, IOException, ObjectOutputStream}
+import org.apache.spark.Logging
+import org.apache.spark.rdd.RDD
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.util.MetadataCleaner
+import org.apache.spark.streaming._
+import org.apache.spark.streaming.StreamingContext._
+import org.apache.spark.streaming.scheduler.Job
+import org.apache.spark.streaming.Duration
 /**
 * A Discretized Stream (DStream), the basic abstraction in Spark Streaming, is a continuous
@@ -41,7 +42,7 @@ import java.io.{ObjectInputStream, IOException, ObjectOutputStream}
 * by a parent DStream.
 *
 * This class contains the basic operations available on all DStreams, such as `map`, `filter` and
-* `window`. In addition, [[org.apache.spark.streaming.PairDStreamFunctions]] contains operations available
+* `window`. In addition, [[org.apache.spark.streaming.dstream.PairDStreamFunctions]] contains operations available
 * only on DStreams of key-value pairs, such as `groupByKeyAndWindow` and `join`. These operations
 * are automatically available on any DStream of the right type (e.g., DStream[(Int, Int)] through
 * implicit conversions when `spark.streaming.StreamingContext._` is imported.
......
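The scaladoc above explains that pair-specific operations live in PairDStreamFunctions and attach to any DStream of pairs by implicit conversion once `StreamingContext._` is imported. A minimal sketch of that mechanism; the source, app name, and window durations are illustrative:

```scala
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.StreamingContext._ // implicit DStream[(K, V)] => PairDStreamFunctions

val ssc = new StreamingContext("local[2]", "PairOpsExample", Seconds(1))
val words = ssc.socketTextStream("localhost", 9999).flatMap(_.split(" "))
val pairs = words.map(word => (word, 1))

// reduceByKeyAndWindow is defined on PairDStreamFunctions, not on DStream itself;
// the implicit conversion imported above is what makes it callable here.
val windowedCounts = pairs.reduceByKeyAndWindow(_ + _, Seconds(30), Seconds(10))
windowedCounts.print()
ssc.start()
```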
@@ -15,17 +15,15 @@
 * limitations under the License.
 */
-package org.apache.spark.streaming
+package org.apache.spark.streaming.dstream
-import scala.collection.mutable.{HashMap, HashSet}
+import scala.collection.mutable.HashMap
 import scala.reflect.ClassTag
+import java.io.{ObjectInputStream, IOException}
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.fs.FileSystem
 import org.apache.spark.Logging
-import java.io.{ObjectInputStream, IOException}
+import org.apache.spark.streaming.Time
 private[streaming]
 class DStreamCheckpointData[T: ClassTag] (dstream: DStream[T])
......
@@ -25,7 +25,7 @@ import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.rdd.UnionRDD
-import org.apache.spark.streaming.{DStreamCheckpointData, StreamingContext, Time}
+import org.apache.spark.streaming.{StreamingContext, Time}
 import org.apache.spark.util.TimeStampedHashMap
......
@@ -17,7 +17,7 @@
 package org.apache.spark.streaming.dstream
-import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.streaming.{Duration, Time}
 import org.apache.spark.rdd.RDD
 import scala.reflect.ClassTag
......
@@ -17,7 +17,7 @@
 package org.apache.spark.streaming.dstream
-import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.streaming.{Duration, Time}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.SparkContext._
 import scala.reflect.ClassTag
......
@@ -17,7 +17,7 @@
 package org.apache.spark.streaming.dstream
-import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.streaming.{Duration, Time}
 import org.apache.spark.rdd.RDD
 import scala.reflect.ClassTag
......
@@ -18,7 +18,7 @@
 package org.apache.spark.streaming.dstream
 import org.apache.spark.rdd.RDD
-import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.streaming.{Duration, Time}
 import org.apache.spark.streaming.scheduler.Job
 import scala.reflect.ClassTag
......
@@ -17,7 +17,7 @@
 package org.apache.spark.streaming.dstream
-import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.streaming.{Duration, Time}
 import org.apache.spark.rdd.RDD
 import scala.reflect.ClassTag
......
@@ -17,7 +17,7 @@
 package org.apache.spark.streaming.dstream
-import org.apache.spark.streaming.{Time, Duration, StreamingContext, DStream}
+import org.apache.spark.streaming.{Time, Duration, StreamingContext}
 import scala.reflect.ClassTag
......