Commit 17db6a90 authored by Prashant Sharma

Style fixes and addressed review comments at #221

parent c1201f47
Showing with 105 additions and 98 deletions
@@ -41,27 +41,27 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-core_${scala-short.version}</artifactId>
+      <artifactId>spark-core_2.10</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-bagel_${scala-short.version}</artifactId>
+      <artifactId>spark-bagel_2.10</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-mllib_${scala-short.version}</artifactId>
+      <artifactId>spark-mllib_2.10</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-repl_${scala-short.version}</artifactId>
+      <artifactId>spark-repl_2.10</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-streaming_${scala-short.version}</artifactId>
+      <artifactId>spark-streaming_2.10</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
@@ -79,7 +79,7 @@
         <artifactId>maven-shade-plugin</artifactId>
         <configuration>
           <shadedArtifactAttached>false</shadedArtifactAttached>
-          <outputFile>${project.build.directory}/scala-${scala-short.version}/${project.artifactId}-${project.version}-hadoop${hadoop.version}.jar</outputFile>
+          <outputFile>${project.build.directory}/scala-2.10/${project.artifactId}-${project.version}-hadoop${hadoop.version}.jar</outputFile>
           <artifactSet>
             <includes>
               <include>*:*</include>
@@ -128,7 +128,7 @@
       <dependencies>
         <dependency>
           <groupId>org.apache.spark</groupId>
-          <artifactId>spark-yarn_${scala-short.version}</artifactId>
+          <artifactId>spark-yarn_2.10</artifactId>
           <version>${project.version}</version>
         </dependency>
       </dependencies>
......
@@ -34,7 +34,7 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-core_${scala-short.version}</artifactId>
+      <artifactId>spark-core_2.10</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
@@ -43,18 +43,18 @@
     </dependency>
     <dependency>
       <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_${scala-short.version}</artifactId>
+      <artifactId>scalatest_2.10</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.scalacheck</groupId>
-      <artifactId>scalacheck_${scala-short.version}</artifactId>
+      <artifactId>scalacheck_2.10</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
   <build>
-    <outputDirectory>target/scala-${scala-short.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala-short.version}/test-classes</testOutputDirectory>
+    <outputDirectory>target/scala-2.10/classes</outputDirectory>
+    <testOutputDirectory>target/scala-2.10/test-classes</testOutputDirectory>
     <plugins>
       <plugin>
         <groupId>org.scalatest</groupId>
......
@@ -86,7 +86,7 @@
     </dependency>
     <dependency>
       <groupId>com.twitter</groupId>
-      <artifactId>chill_${scala-short.version}</artifactId>
+      <artifactId>chill_2.10</artifactId>
       <version>0.3.1</version>
     </dependency>
     <dependency>
@@ -96,11 +96,11 @@
     </dependency>
     <dependency>
       <groupId>com.typesafe.akka</groupId>
-      <artifactId>akka-remote_${scala-short.version}</artifactId>
+      <artifactId>akka-remote_2.10</artifactId>
     </dependency>
     <dependency>
       <groupId>com.typesafe.akka</groupId>
-      <artifactId>akka-slf4j_${scala-short.version}</artifactId>
+      <artifactId>akka-slf4j_2.10</artifactId>
     </dependency>
     <dependency>
       <groupId>org.scala-lang</groupId>
@@ -108,7 +108,7 @@
     </dependency>
     <dependency>
       <groupId>net.liftweb</groupId>
-      <artifactId>lift-json_${scala-short.version}</artifactId>
+      <artifactId>lift-json_2.10</artifactId>
     </dependency>
     <dependency>
       <groupId>it.unimi.dsi</groupId>
@@ -120,7 +120,7 @@
     </dependency>
     <dependency>
       <groupId>com.github.scala-incubator.io</groupId>
-      <artifactId>scala-io-file_${scala-short.version}</artifactId>
+      <artifactId>scala-io-file_2.10</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.mesos</groupId>
@@ -166,12 +166,12 @@
     </dependency>
     <dependency>
       <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_${scala-short.version}</artifactId>
+      <artifactId>scalatest_2.10</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.scalacheck</groupId>
-      <artifactId>scalacheck_${scala-short.version}</artifactId>
+      <artifactId>scalacheck_2.10</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -191,8 +191,8 @@
     </dependency>
   </dependencies>
   <build>
-    <outputDirectory>target/scala-${scala-short.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala-short.version}/test-classes</testOutputDirectory>
+    <outputDirectory>target/scala-2.10/classes</outputDirectory>
+    <testOutputDirectory>target/scala-2.10/test-classes</testOutputDirectory>
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
......
@@ -72,12 +72,11 @@ private[spark] class MapOutputTracker extends Logging {
   // throw a SparkException if this fails.
   private def askTracker(message: Any): Any = {
     try {
-      val future = if (trackerActor.isLeft ) {
-        trackerActor.left.get.ask(message)(timeout)
-      } else {
-        trackerActor.right.get.ask(message)(timeout)
+      val future = trackerActor match {
+        case Left(a: ActorRef) => a.ask(message)(timeout)
+        case Right(b: ActorSelection) => b.ask(message)(timeout)
       }
-      return Await.result(future, timeout)
+      Await.result(future, timeout)
     } catch {
       case e: Exception =>
         throw new SparkException("Error communicating with MapOutputTracker", e)
......
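Note: this hunk and the BlockManagerMaster hunk further down apply the same refactor — replacing isLeft/left.get/right.get navigation on an Either[ActorRef, ActorSelection] with a pattern match, and dropping the redundant return. A minimal, self-contained sketch of the idiom, with plain Scala stand-ins in place of the Akka types (all names here are illustrative, not from the commit):

object EitherAskSketch {
  // Stand-ins for akka.actor.ActorRef and akka.actor.ActorSelection.
  final case class Ref(name: String)  { def ask(msg: Any): String = s"ref $name got $msg" }
  final case class Sel(path: String)  { def ask(msg: Any): String = s"selection $path got $msg" }

  // The match states each case exactly once; the if/isLeft version repeats
  // the accessor logic and throws if left.get/right.get are mixed up.
  def askEither(target: Either[Ref, Sel], msg: Any): String = target match {
    case Left(ref)  => ref.ask(msg)
    case Right(sel) => sel.ask(msg)
  }

  def main(args: Array[String]): Unit = {
    println(askEither(Left(Ref("tracker")), "GetStatus"))
    println(askEither(Right(Sel("/user/tracker")), "GetStatus"))
  }
}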
@@ -17,11 +17,11 @@
 package org.apache.spark
 
-import org.apache.spark.util.Utils
-import org.apache.spark.rdd.RDD
 import scala.reflect.ClassTag
+
+import org.apache.spark.rdd.RDD
+import org.apache.spark.util.Utils
 
 /**
  * An object that defines how the elements in a key-value pair RDD are partitioned by key.
  * Maps each key to a partition ID, from 0 to `numPartitions - 1`.
......
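The import reorderings in this and the following Scala hunks mostly converge on one layout — our reading of the diff, not a quoted style guide: java.* first, then scala.*, then third-party packages, then org.apache.spark.*, with a blank line between groups and alphabetical order within each. A hypothetical skeleton showing the grouping (the package and imports are illustrative; it assumes akka-actor and spark-core on the classpath):

package org.apache.spark.example  // hypothetical file for illustration only

import java.io.File

import scala.reflect.ClassTag

import akka.actor.Actor

import org.apache.spark.SparkContext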
@@ -17,23 +17,19 @@
 package org.apache.spark.deploy.worker.ui
 
-import akka.actor.ActorRef
-import akka.util.Timeout
+import java.io.File
 
 import scala.concurrent.duration._
 
-import java.io.{FileInputStream, File}
+import akka.util.Timeout
 import javax.servlet.http.HttpServletRequest
 
-import org.eclipse.jetty.server.{Handler, Server}
-
+import org.apache.spark.Logging
 import org.apache.spark.deploy.worker.Worker
-import org.apache.spark.{Logging}
-import org.apache.spark.ui.JettyUtils
+import org.apache.spark.ui.{JettyUtils, UIUtils}
 import org.apache.spark.ui.JettyUtils._
-import org.apache.spark.ui.UIUtils
 import org.apache.spark.util.Utils
+
+import org.eclipse.jetty.server.{Handler, Server}
 
 /**
  * Web UI server for the standalone worker.
......
@@ -17,16 +17,13 @@
 package org.apache.spark.rdd
 
+import java.io.IOException
+
+import scala.reflect.ClassTag
+
+import org.apache.hadoop.fs.Path
+
 import org.apache.spark._
 import org.apache.spark.deploy.SparkHadoopUtil
-import org.apache.hadoop.mapred.{FileInputFormat, SequenceFileInputFormat, JobConf, Reporter}
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.io.{NullWritable, BytesWritable}
-import org.apache.hadoop.util.ReflectionUtils
-import org.apache.hadoop.fs.Path
-import java.io.{File, IOException, EOFException}
-import java.text.NumberFormat
 
 private[spark] class CheckpointRDDPartition(val index: Int) extends Partition {}
......
@@ -20,6 +20,7 @@ package org.apache.spark.rdd
 import java.sql.{Connection, ResultSet}
 
+import scala.reflect.ClassTag
+
 import org.apache.spark.{Logging, Partition, SparkContext, TaskContext}
 import org.apache.spark.util.NextIterator
......
@@ -17,9 +17,10 @@
 package org.apache.spark.rdd
 
-import org.apache.spark.{Partition, TaskContext}
+import scala.reflect.ClassTag
+
+import org.apache.spark.{Partition, TaskContext}
 
 private[spark]
 class MappedRDD[U: ClassTag, T: ClassTag](prev: RDD[T], f: T => U)
   extends RDD[U](prev) {
......
@@ -17,9 +17,10 @@
 package org.apache.spark.rdd
 
-import org.apache.spark.{RangePartitioner, Logging}
+import scala.reflect.ClassTag
+
+import org.apache.spark.{Logging, RangePartitioner}
 
 /**
  * Extra functions available on RDDs of (key, value) pairs where the key is sortable through
  * an implicit conversion. Import `org.apache.spark.SparkContext._` at the top of your program to
......
@@ -17,9 +17,10 @@
 package org.apache.spark.rdd
 
-import org.apache.spark.{Dependency, Partitioner, SparkEnv, ShuffleDependency, Partition, TaskContext}
+import scala.reflect.ClassTag
+
+import org.apache.spark.{Dependency, Partition, Partitioner, ShuffleDependency,
+  SparkEnv, TaskContext}
 
 private[spark] class ShuffledRDDPartition(val idx: Int) extends Partition {
   override val index = idx
......
@@ -157,10 +157,9 @@ private[spark] class BlockManagerMaster(var driverActor : Either[ActorRef, ActorSelection])
     while (attempts < AKKA_RETRY_ATTEMPTS) {
       attempts += 1
       try {
-        val future = if (driverActor.isLeft ) {
-          driverActor.left.get.ask(message)(timeout)
-        } else {
-          driverActor.right.get.ask(message)(timeout)
+        val future = driverActor match {
+          case Left(a: ActorRef) => a.ask(message)(timeout)
+          case Right(b: ActorSelection) => b.ask(message)(timeout)
         }
         val result = Await.result(future, timeout)
         if (result == null) {
......
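The hunk above sits inside a retry loop: each attempt re-asks the driver actor and blocks on Await.result. A self-contained sketch of that control flow — the attempt budget, timeout, and all names are illustrative (Spark derives its values from system properties), and the null-result check from the original is omitted:

import scala.concurrent.{Await, Future}
import scala.concurrent.duration._

object RetryAskSketch {
  // Retries a future-producing call, blocking on each attempt, and throws
  // once the attempt budget is exhausted — mirroring the loop shape above.
  def askWithRetry[T](maxAttempts: Int, timeout: FiniteDuration)(ask: () => Future[T]): T = {
    var attempts = 0
    var lastException: Exception = null
    while (attempts < maxAttempts) {
      attempts += 1
      try {
        return Await.result(ask(), timeout)
      } catch {
        case e: Exception => lastException = e
      }
    }
    throw new RuntimeException(s"Failed after $maxAttempts attempts", lastException)
  }

  def main(args: Array[String]): Unit = {
    println(askWithRetry(3, 1.second)(() => Future.successful("pong")))
  }
}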
-/**
- * Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com>
- */
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
 // Must be in akka.actor package as ActorSystemImpl is protected[akka].
......
@@ -49,25 +49,25 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-core_${scala-short.version}</artifactId>
+      <artifactId>spark-core_2.10</artifactId>
       <version>${project.version}</version>
       <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-streaming_${scala-short.version}</artifactId>
+      <artifactId>spark-streaming_2.10</artifactId>
      <version>${project.version}</version>
       <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-mllib_${scala-short.version}</artifactId>
+      <artifactId>spark-mllib_2.10</artifactId>
       <version>${project.version}</version>
       <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-bagel_${scala-short.version}</artifactId>
+      <artifactId>spark-bagel_2.10</artifactId>
       <version>${project.version}</version>
       <scope>provided</scope>
     </dependency>
@@ -88,7 +88,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.kafka</groupId>
-      <artifactId>kafka_2.9.2</artifactId>
+      <artifactId>kafka_2.10</artifactId>
       <version>0.8.0-beta1</version>
       <exclusions>
         <exclusion>
@@ -107,17 +107,17 @@
     </dependency>
     <dependency>
       <groupId>com.twitter</groupId>
-      <artifactId>algebird-core_${scala-short.version}</artifactId>
+      <artifactId>algebird-core_2.10</artifactId>
       <version>0.1.11</version>
     </dependency>
     <dependency>
       <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_${scala-short.version}</artifactId>
+      <artifactId>scalatest_2.10</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.scalacheck</groupId>
-      <artifactId>scalacheck_${scala-short.version}</artifactId>
+      <artifactId>scalacheck_2.10</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -166,8 +166,8 @@
   </dependencies>
   <build>
-    <outputDirectory>target/scala-${scala-short.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala-short.version}/test-classes</testOutputDirectory>
+    <outputDirectory>target/scala-2.10/classes</outputDirectory>
+    <testOutputDirectory>target/scala-2.10/test-classes</testOutputDirectory>
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
......
@@ -34,7 +34,7 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-core_${scala-short.version}</artifactId>
+      <artifactId>spark-core_2.10</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
@@ -48,12 +48,12 @@
     </dependency>
     <dependency>
       <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_${scala-short.version}</artifactId>
+      <artifactId>scalatest_2.10</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.scalacheck</groupId>
-      <artifactId>scalacheck_${scala-short.version}</artifactId>
+      <artifactId>scalacheck_2.10</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -63,8 +63,8 @@
     </dependency>
   </dependencies>
   <build>
-    <outputDirectory>target/scala-${scala-short.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala-short.version}/test-classes</testOutputDirectory>
+    <outputDirectory>target/scala-2.10/classes</outputDirectory>
+    <testOutputDirectory>target/scala-2.10/test-classes</testOutputDirectory>
     <plugins>
       <plugin>
         <groupId>org.scalatest</groupId>
......
@@ -101,7 +101,6 @@
     <java.version>1.6</java.version>
-    <scala-short.version>2.10</scala-short.version>
     <scala.version>2.10.3</scala.version>
     <mesos.version>0.13.0</mesos.version>
     <akka.version>2.2.3</akka.version>
@@ -205,7 +204,7 @@
       </dependency>
       <dependency>
         <groupId>com.twitter</groupId>
-        <artifactId>chill_${scala-short.version}</artifactId>
+        <artifactId>chill_2.10</artifactId>
        <version>0.3.1</version>
       </dependency>
       <dependency>
@@ -215,7 +214,7 @@
       </dependency>
       <dependency>
         <groupId>com.typesafe.akka</groupId>
-        <artifactId>akka-actor_${scala-short.version}</artifactId>
+        <artifactId>akka-actor_2.10</artifactId>
         <version>${akka.version}</version>
         <exclusions>
           <exclusion>
@@ -226,7 +225,7 @@
       </dependency>
       <dependency>
         <groupId>com.typesafe.akka</groupId>
-        <artifactId>akka-remote_${scala-short.version}</artifactId>
+        <artifactId>akka-remote_2.10</artifactId>
         <version>${akka.version}</version>
         <exclusions>
           <exclusion>
@@ -237,7 +236,7 @@
       </dependency>
       <dependency>
         <groupId>com.typesafe.akka</groupId>
-        <artifactId>akka-slf4j_${scala-short.version}</artifactId>
+        <artifactId>akka-slf4j_2.10</artifactId>
         <version>${akka.version}</version>
         <exclusions>
           <exclusion>
@@ -258,7 +257,7 @@
       </dependency>
       <dependency>
         <groupId>com.github.scala-incubator.io</groupId>
-        <artifactId>scala-io-file_${scala-short.version}</artifactId>
+        <artifactId>scala-io-file_2.10</artifactId>
         <version>0.4.1</version>
       </dependency>
       <dependency>
@@ -279,7 +278,7 @@
       </dependency>
       <dependency>
         <groupId>net.liftweb</groupId>
-        <artifactId>lift-json_${scala-short.version}</artifactId>
+        <artifactId>lift-json_2.10</artifactId>
         <version>2.5.1</version>
         <exclusions>
           <exclusion>
@@ -335,7 +334,7 @@
       </dependency>
       <dependency>
         <groupId>org.scalatest</groupId>
-        <artifactId>scalatest_${scala-short.version}</artifactId>
+        <artifactId>scalatest_2.10</artifactId>
         <version>1.9.1</version>
         <scope>test</scope>
       </dependency>
@@ -358,7 +357,7 @@
       </dependency>
       <dependency>
         <groupId>org.scalacheck</groupId>
-        <artifactId>scalacheck_${scala-short.version}</artifactId>
+        <artifactId>scalacheck_2.10</artifactId>
         <version>1.10.0</version>
         <scope>test</scope>
       </dependency>
......
@@ -40,18 +40,18 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-core_${scala-short.version}</artifactId>
+      <artifactId>spark-core_2.10</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-bagel_${scala-short.version}</artifactId>
+      <artifactId>spark-bagel_2.10</artifactId>
       <version>${project.version}</version>
       <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-repl_${scala-short.version}</artifactId>
+      <artifactId>spark-repl_2.10</artifactId>
       <version>${project.version}</version>
       <scope>runtime</scope>
     </dependency>
......
@@ -39,18 +39,18 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-core_${scala-short.version}</artifactId>
+      <artifactId>spark-core_2.10</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-bagel_${scala-short.version}</artifactId>
+      <artifactId>spark-bagel_2.10</artifactId>
       <version>${project.version}</version>
       <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-mllib_${scala-short.version}</artifactId>
+      <artifactId>spark-mllib_2.10</artifactId>
       <version>${project.version}</version>
       <scope>runtime</scope>
     </dependency>
@@ -78,18 +78,18 @@
     </dependency>
     <dependency>
       <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_${scala-short.version}</artifactId>
+      <artifactId>scalatest_2.10</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.scalacheck</groupId>
-      <artifactId>scalacheck_${scala-short.version}</artifactId>
+      <artifactId>scalacheck_2.10</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
   <build>
-    <outputDirectory>target/scala-${scala-short.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala-short.version}/test-classes</testOutputDirectory>
+    <outputDirectory>target/scala-2.10/classes</outputDirectory>
+    <testOutputDirectory>target/scala-2.10/test-classes</testOutputDirectory>
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
......
@@ -48,7 +48,7 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-core_${scala-short.version}</artifactId>
+      <artifactId>spark-core_2.10</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
@@ -61,8 +61,8 @@
       <version>1.9.11</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.kafka</groupId>
-      <artifactId>kafka_2.9.2</artifactId>
+      <groupId>com.sksamuel.kafka</groupId>
+      <artifactId>kafka_2.10</artifactId>
       <version>0.8.0-beta1</version>
       <exclusions>
         <exclusion>
@@ -111,7 +111,7 @@
     </dependency>
     <dependency>
       <groupId>com.typesafe.akka</groupId>
-      <artifactId>akka-zeromq_${scala-short.version}</artifactId>
+      <artifactId>akka-zeromq_2.10</artifactId>
       <version>${akka.version}</version>
       <exclusions>
         <exclusion>
@@ -122,12 +122,12 @@
     </dependency>
     <dependency>
       <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_${scala-short.version}</artifactId>
+      <artifactId>scalatest_2.10</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.scalacheck</groupId>
-      <artifactId>scalacheck_${scala-short.version}</artifactId>
+      <artifactId>scalacheck_2.10</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -151,8 +151,8 @@
     </dependency>
   </dependencies>
   <build>
-    <outputDirectory>target/scala-${scala-short.version}/classes</outputDirectory>
-    <testOutputDirectory>target/scala-${scala-short.version}/test-classes</testOutputDirectory>
+    <outputDirectory>target/scala-2.10/classes</outputDirectory>
+    <testOutputDirectory>target/scala-2.10/test-classes</testOutputDirectory>
     <plugins>
       <plugin>
         <groupId>org.scalatest</groupId>
......
@@ -17,14 +17,14 @@
 package org.apache.spark.streaming.receivers
 
+import scala.reflect.ClassTag
+
 import akka.actor.Actor
 import akka.util.ByteString
 import akka.zeromq._
 
 import org.apache.spark.Logging
 
-import scala.reflect.ClassTag
-
 /**
  * A receiver to subscribe to ZeroMQ stream.
  */
......