Commit d7f08932 authored by Matei Zaharia

Fixed AccumulatorSuite to clean up SparkContext with BeforeAndAfter

parent f7149c5e
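
The change replaces per-test SparkContext setup and teardown with ScalaTest's BeforeAndAfter trait: the context lives in a var, and an after block stops it once each test finishes, even when the test body throws. A minimal, self-contained sketch of the pattern (ExampleSuite and its test body are hypothetical, assuming the 2012-era spark.SparkContext constructor; they are not part of this commit):

import org.scalatest.BeforeAndAfter
import org.scalatest.FunSuite
import spark.SparkContext

// Hypothetical suite illustrating the cleanup pattern this commit adopts.
class ExampleSuite extends FunSuite with BeforeAndAfter {
  var sc: SparkContext = null

  // Runs after every test: stop the context and clear the reference,
  // so a test that fails midway cannot leak a live SparkContext.
  after {
    if (sc != null) {
      sc.stop()
      sc = null
    }
  }

  test("counts a small range") {
    sc = new SparkContext("local", "test")
    assert(sc.parallelize(1 to 10).count() === 10)
  }
}

Stopping the context in after rather than at the end of each test body guarantees cleanup on failure, which matters because a leaked local SparkContext could otherwise interfere with contexts created by later tests in the same JVM.
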
@@ -1,5 +1,6 @@
 package spark
 
+import org.scalatest.BeforeAndAfter
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
 import collection.mutable
@@ -8,32 +9,39 @@ import scala.math.exp
 import scala.math.signum
 import spark.SparkContext._
 
-class AccumulatorSuite extends FunSuite with ShouldMatchers {
+class AccumulatorSuite extends FunSuite with ShouldMatchers with BeforeAndAfter {
+
+  var sc: SparkContext = null
+
+  after {
+    if (sc != null) {
+      sc.stop()
+      sc = null
+    }
+  }
 
   test ("basic accumulation"){
-    val sc = new SparkContext("local", "test")
+    sc = new SparkContext("local", "test")
     val acc : Accumulator[Int] = sc.accumulator(0)
 
     val d = sc.parallelize(1 to 20)
     d.foreach{x => acc += x}
     acc.value should be (210)
-    sc.stop()
   }
 
   test ("value not assignable from tasks") {
-    val sc = new SparkContext("local", "test")
+    sc = new SparkContext("local", "test")
     val acc : Accumulator[Int] = sc.accumulator(0)
 
     val d = sc.parallelize(1 to 20)
     evaluating {d.foreach{x => acc.value = x}} should produce [Exception]
-    sc.stop()
   }
 
   test ("add value to collection accumulators") {
     import SetAccum._
     val maxI = 1000
     for (nThreads <- List(1, 10)) { //test single & multi-threaded
-      val sc = new SparkContext("local[" + nThreads + "]", "test")
+      sc = new SparkContext("local[" + nThreads + "]", "test")
       val acc: Accumulable[mutable.Set[Any], Any] = sc.accumulable(new mutable.HashSet[Any]())
       val d = sc.parallelize(1 to maxI)
       d.foreach {
@@ -44,6 +52,7 @@ class AccumulatorSuite extends FunSuite with ShouldMatchers {
         v should contain(i)
       }
       sc.stop()
+      sc = null
     }
   }
 
@@ -67,16 +76,17 @@ class AccumulatorSuite extends FunSuite with ShouldMatchers {
     import SetAccum._
     val maxI = 1000
     for (nThreads <- List(1, 10)) { //test single & multi-threaded
-      val sc = new SparkContext("local[" + nThreads + "]", "test")
+      sc = new SparkContext("local[" + nThreads + "]", "test")
       val acc: Accumulable[mutable.Set[Any], Any] = sc.accumulable(new mutable.HashSet[Any]())
       val d = sc.parallelize(1 to maxI)
-      val thrown = evaluating {
+      evaluating {
        d.foreach {
          x => acc.value += x
        }
      } should produce [SparkException]
-      println(thrown)
       sc.stop()
+      sc = null
     }
   }
-}
\ No newline at end of file
+}
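
For reference, the assertion style used throughout this suite is ScalaTest 1.x's exception matcher: evaluating { ... } should produce [E] fails the test unless the block throws E, and returns the caught exception for further inspection. A hypothetical stand-alone example, not part of the commit:

import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers

// Hypothetical suite showing the evaluating/produce idiom in isolation.
class EvaluatingExampleSuite extends FunSuite with ShouldMatchers {
  test("evaluating/produce asserts that a block throws") {
    // charAt(9) on a 3-character string throws; the matcher catches it
    // and hands the exception back as the expression's value.
    val thrown = evaluating { "abc".charAt(9) } should produce [StringIndexOutOfBoundsException]
    thrown.getMessage should include ("9")
  }
}

This also explains why the last hunk drops val thrown and println(thrown): the test only needs to assert that a SparkException is thrown, not inspect or print it.
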