Commit e09f4a96 authored by Martin Weindel

fixed some warnings

parent 9b0c9c89
Showing 20 changed files with 21 additions and 23 deletions
@@ -19,8 +19,6 @@ package org.apache.spark.network.netty;
 import io.netty.bootstrap.Bootstrap;
 import io.netty.channel.Channel;
-import io.netty.channel.ChannelFuture;
-import io.netty.channel.ChannelFutureListener;
 import io.netty.channel.ChannelOption;
 import io.netty.channel.oio.OioEventLoopGroup;
 import io.netty.channel.socket.oio.OioSocketChannel;
...
@@ -20,7 +20,6 @@ package org.apache.spark.network.netty;
 import java.net.InetSocketAddress;
 import io.netty.bootstrap.ServerBootstrap;
-import io.netty.channel.Channel;
 import io.netty.channel.ChannelFuture;
 import io.netty.channel.ChannelOption;
 import io.netty.channel.oio.OioEventLoopGroup;
...
@@ -17,7 +17,6 @@
 package org.apache.spark.api.java;
-import java.util.Arrays;
 import java.util.ArrayList;
 import java.util.List;
...
@@ -18,8 +18,6 @@
 package org.apache.spark.api.java.function;
-import scala.runtime.AbstractFunction1;
 import java.io.Serializable;
 /**
...
@@ -19,7 +19,6 @@ package org.apache.spark.api.java.function;
 import scala.reflect.ClassTag;
 import scala.reflect.ClassTag$;
-import scala.runtime.AbstractFunction1;
 import java.io.Serializable;
...
@@ -19,7 +19,6 @@ package org.apache.spark.api.java.function;
 import scala.reflect.ClassTag;
 import scala.reflect.ClassTag$;
-import scala.runtime.AbstractFunction2;
 import java.io.Serializable;
...
@@ -20,7 +20,6 @@ package org.apache.spark.api.java.function;
 import scala.Tuple2;
 import scala.reflect.ClassTag;
 import scala.reflect.ClassTag$;
-import scala.runtime.AbstractFunction1;
 import java.io.Serializable;
...
@@ -20,7 +20,6 @@ package org.apache.spark.api.java.function;
 import scala.Tuple2;
 import scala.reflect.ClassTag;
 import scala.reflect.ClassTag$;
-import scala.runtime.AbstractFunction1;
 import java.io.Serializable;
...
@@ -64,7 +64,7 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String
           startDaemon()
           new Socket(daemonHost, daemonPort)
         }
-        case e => throw e
+        case e: Throwable => throw e
       }
     }
   }
@@ -198,7 +198,7 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String
         }
       }.start()
     } catch {
-      case e => {
+      case e: Throwable => {
         stopDaemon()
         throw e
       }
...
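The two PythonWorkerFactory hunks above add an explicit Throwable type to the catch-all patterns. Scala 2.10 warns that an untyped "case e =>" in a catch block catches all Throwables, including fatal errors and control-flow exceptions, and suggests annotating the pattern to make that intent explicit. Below is a minimal sketch of the warning and its fix; the object and method names are made up for illustration and are not Spark code.

// Minimal sketch, not part of this commit: CatchAllExample and parsePort are hypothetical.
object CatchAllExample {
  def parsePort(s: String): Int =
    try {
      s.toInt
    } catch {
      // case e => -1             // Scala 2.10: "This catches all Throwables" warning
      case e: Throwable => -1     // same behaviour, intent made explicit, no warning
    }

  def main(args: Array[String]): Unit = {
    println(parsePort("4040"))  // 4040
    println(parsePort("oops"))  // -1
  }
}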
@@ -18,7 +18,7 @@
 package org.apache.spark.deploy
 private[spark] object ExecutorState
-  extends Enumeration("LAUNCHING", "LOADING", "RUNNING", "KILLED", "FAILED", "LOST") {
+  extends Enumeration {
   val LAUNCHING, LOADING, RUNNING, KILLED, FAILED, LOST = Value
...
@@ -18,7 +18,7 @@
 package org.apache.spark.deploy.master
 private[spark] object ApplicationState
-  extends Enumeration("WAITING", "RUNNING", "FINISHED", "FAILED") {
+  extends Enumeration {
   type ApplicationState = Value
...
@@ -17,7 +17,7 @@
 package org.apache.spark.deploy.master
-private[spark] object WorkerState extends Enumeration("ALIVE", "DEAD", "DECOMMISSIONED") {
+private[spark] object WorkerState extends Enumeration {
   type WorkerState = Value
   val ALIVE, DEAD, DECOMMISSIONED = Value
...
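The three deploy-state hunks above drop the explicit name lists passed to the Enumeration constructor; the Enumeration(names: String*) constructors trigger deprecation warnings in Scala 2.10, which is presumably what this commit silences. Without the name list, each value's name is derived reflectively from the val it is bound to, so the printed names stay the same. A small sketch under that assumption follows; JobState is a made-up enumeration, not one of the Spark objects above.

// Minimal sketch, not part of this commit: JobState and JobStateDemo are hypothetical.
object JobState extends Enumeration {
  type JobState = Value
  val WAITING, RUNNING, FINISHED, FAILED = Value  // names come from the val identifiers
}

object JobStateDemo {
  def main(args: Array[String]): Unit = {
    println(JobState.WAITING)                // prints "WAITING" without a name list
    println(JobState.values.mkString(", "))  // WAITING, RUNNING, FINISHED, FAILED
  }
}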
@@ -19,13 +19,14 @@ package org.apache.spark.rdd
 import org.apache.spark.{SparkContext, SparkEnv, Partition, TaskContext}
 import org.apache.spark.storage.BlockManager
+import scala.reflect.ClassTag
 private[spark] class BlockRDDPartition(val blockId: String, idx: Int) extends Partition {
   val index = idx
 }
 private[spark]
-class BlockRDD[T: ClassManifest](sc: SparkContext, @transient blockIds: Array[String])
+class BlockRDD[T: ClassTag](sc: SparkContext, @transient blockIds: Array[String])
   extends RDD[T](sc, Nil) {
   @transient lazy val locations_ = BlockManager.blockIdsToHosts(blockIds, SparkEnv.get)
...
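The BlockRDD hunk above, and the remaining RDD hunks below, apply the same substitution: the deprecated ClassManifest context bound is replaced by scala.reflect.ClassTag, its Scala 2.10 counterpart. Both serve the same purpose of carrying the runtime class of the type parameter so that generic code can, for example, construct arrays after erasure. Below is a minimal sketch of a ClassTag context bound; Buffer and BufferDemo are made-up names, not Spark code.

import scala.reflect.ClassTag

// Minimal sketch, not part of this commit: Buffer is a hypothetical class.
// The ClassTag[T] evidence is what lets new Array[T](...) work after erasure.
class Buffer[T: ClassTag](capacity: Int) {
  private val storage = new Array[T](capacity)
  def put(i: Int, value: T): Unit = storage(i) = value
  def get(i: Int): T = storage(i)
}

object BufferDemo {
  def main(args: Array[String]): Unit = {
    val b = new Buffer[Int](4)
    b.put(0, 42)
    println(b.get(0))  // 42
  }
}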
@@ -22,6 +22,7 @@ import java.io.{ObjectOutputStream, IOException}
 import scala.collection.mutable
 import scala.Some
 import scala.collection.mutable.ArrayBuffer
+import scala.reflect.ClassTag
 /**
  * Class that captures a coalesced RDD by essentially keeping track of parent partitions
@@ -68,7 +69,7 @@ case class CoalescedRDDPartition(
  * @param maxPartitions number of desired partitions in the coalesced RDD
  * @param balanceSlack used to trade-off balance and locality. 1.0 is all locality, 0 is all balance
  */
-class CoalescedRDD[T: ClassManifest](
+class CoalescedRDD[T: ClassTag](
     @transient var prev: RDD[T],
     maxPartitions: Int,
     balanceSlack: Double = 0.10)
...
@@ -18,12 +18,13 @@
 package org.apache.spark.rdd
 import org.apache.spark.{SparkContext, SparkEnv, Partition, TaskContext}
+import scala.reflect.ClassTag
 /**
  * An RDD that is empty, i.e. has no element in it.
  */
-class EmptyRDD[T: ClassManifest](sc: SparkContext) extends RDD[T](sc, Nil) {
+class EmptyRDD[T: ClassTag](sc: SparkContext) extends RDD[T](sc, Nil) {
   override def getPartitions: Array[Partition] = Array.empty
...
@@ -18,8 +18,9 @@
 package org.apache.spark.rdd
 import org.apache.spark.{OneToOneDependency, Partition, TaskContext}
+import scala.reflect.ClassTag
-private[spark] class FilteredRDD[T: ClassManifest](
+private[spark] class FilteredRDD[T: ClassTag](
     prev: RDD[T],
     f: T => Boolean)
   extends RDD[T](prev) {
...
@@ -18,10 +18,11 @@
 package org.apache.spark.rdd
 import org.apache.spark.{Partition, TaskContext}
+import scala.reflect.ClassTag
 private[spark]
-class FlatMappedRDD[U: ClassManifest, T: ClassManifest](
+class FlatMappedRDD[U: ClassTag, T: ClassTag](
     prev: RDD[T],
     f: T => TraversableOnce[U])
   extends RDD[U](prev) {
...
@@ -18,8 +18,9 @@
 package org.apache.spark.rdd
 import org.apache.spark.{Partition, TaskContext}
+import scala.reflect.ClassTag
-private[spark] class GlommedRDD[T: ClassManifest](prev: RDD[T])
+private[spark] class GlommedRDD[T: ClassTag](prev: RDD[T])
   extends RDD[Array[T]](prev) {
   override def getPartitions: Array[Partition] = firstParent[T].partitions
...
@@ -18,10 +18,11 @@
 package org.apache.spark.rdd
 import org.apache.spark.{Partition, TaskContext}
+import scala.reflect.ClassTag
 private[spark]
-class MapPartitionsRDD[U: ClassManifest, T: ClassManifest](
+class MapPartitionsRDD[U: ClassTag, T: ClassTag](
     prev: RDD[T],
     f: Iterator[T] => Iterator[U],
     preservesPartitioning: Boolean = false)
...
@@ -18,6 +18,7 @@
 package org.apache.spark.rdd
 import org.apache.spark.{Partition, TaskContext}
+import scala.reflect.ClassTag
 /**
@@ -26,7 +27,7 @@ import org.apache.spark.{Partition, TaskContext}
  * information such as the number of tuples in a partition.
  */
 private[spark]
-class MapPartitionsWithIndexRDD[U: ClassManifest, T: ClassManifest](
+class MapPartitionsWithIndexRDD[U: ClassTag, T: ClassTag](
     prev: RDD[T],
     f: (Int, Iterator[T]) => Iterator[U],
     preservesPartitioning: Boolean
...