Commit 38ccd6eb authored by Daoyuan, committed by Reynold Xin

move some test file to match src code

Just move some test suites to their corresponding packages.

Author: Daoyuan <daoyuan.wang@intel.com>

Closes #1401 from adrian-wang/movetestfiles and squashes the following commits:

d1a6803 [Daoyuan] move some test file to match src code
parent aab53496
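The hunks below all make the same kind of change: each suite's package declaration is updated to the package of the code it exercises, and its imports are adjusted accordingly. As a rough illustration only (the suite name and test body here are hypothetical, not taken from this commit), a test suite moved into org.apache.spark.rdd ends up looking roughly like this:

// Hypothetical sketch, not part of this commit: a suite for RDD code declared
// in the matching org.apache.spark.rdd package. SharedSparkContext is Spark's
// test trait that supplies the SparkContext `sc` used below.
package org.apache.spark.rdd

import org.scalatest.FunSuite

import org.apache.spark.SharedSparkContext

class ExamplePipedSuite extends FunSuite with SharedSparkContext {
  test("element count survives a mapPartitions round trip") {
    val nums = sc.parallelize(1 to 100, 4)
    // Count the elements in each partition, then add the per-partition counts up.
    val total = nums.mapPartitions(iter => Iterator(iter.size)).reduce(_ + _)
    assert(total === 100)
  }
}

A general side benefit of this layout (not stated in the commit message) is that a suite in the same package as the class under test can reach package-private members without widening their visibility.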
@@ -15,14 +15,12 @@
  * limitations under the License.
  */
 
-package org.apache.spark
+package org.apache.spark.broadcast
 
+import org.apache.spark.storage.{BroadcastBlockId, _}
+import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkException}
 import org.scalatest.FunSuite
 
-import org.apache.spark.storage._
-import org.apache.spark.broadcast.{Broadcast, HttpBroadcast}
-import org.apache.spark.storage.BroadcastBlockId
-
 class BroadcastSuite extends FunSuite with LocalSparkContext {
 
   private val httpConf = broadcastConf("HttpBroadcastFactory")
@@ -15,15 +15,14 @@
  * limitations under the License.
  */
 
-package org.apache.spark
-
-import org.scalatest.FunSuite
+package org.apache.spark.network
 
 import java.nio._
 
-import org.apache.spark.network.{ConnectionManager, Message, ConnectionManagerId}
-import scala.concurrent.Await
-import scala.concurrent.TimeoutException
+import org.apache.spark.{SecurityManager, SparkConf}
+import org.scalatest.FunSuite
+
+import scala.concurrent.{Await, TimeoutException}
 import scala.concurrent.duration._
 import scala.language.postfixOps
@@ -15,25 +15,21 @@
  * limitations under the License.
  */
 
-package org.apache.spark
+package org.apache.spark.rdd
 
 import java.io.File
 
-import org.scalatest.FunSuite
-
-import org.apache.spark.rdd.{HadoopRDD, PipedRDD, HadoopPartition}
-import org.apache.hadoop.mapred.{JobConf, TextInputFormat, FileSplit}
 import org.apache.hadoop.fs.Path
+import org.apache.hadoop.io.{LongWritable, Text}
+import org.apache.hadoop.mapred.{FileSplit, JobConf, TextInputFormat}
+import org.apache.spark._
+import org.scalatest.FunSuite
 
 import scala.collection.Map
 import scala.language.postfixOps
 import scala.sys.process._
 import scala.util.Try
 
-import org.apache.hadoop.io.{Text, LongWritable}
 import org.apache.spark.executor.TaskMetrics
 
 class PipedRDDSuite extends FunSuite with SharedSparkContext {
 
   test("basic pipe") {
@@ -15,8 +15,9 @@
  * limitations under the License.
  */
 
-package org.apache.spark
+package org.apache.spark.rdd
 
+import org.apache.spark.SharedSparkContext
 import org.scalatest.FunSuite
 
 object ZippedPartitionsSuite {
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package org.apache.spark
-
-import org.scalatest.FunSuite
+package org.apache.spark.util
 
 import akka.actor._
+import org.apache.spark._
 import org.apache.spark.scheduler.MapStatus
 import org.apache.spark.storage.BlockManagerId
-import org.apache.spark.util.AkkaUtils
+import org.scalatest.FunSuite
+
 import scala.concurrent.Await
 
 /**