From 9451fed52cb8a00c706b582a0b51d8cd832f9350 Mon Sep 17 00:00:00 2001
From: Andrew Or <andrew@databricks.com>
Date: Wed, 17 Feb 2016 16:17:20 -0800
Subject: [PATCH] [SPARK-13344][TEST] Fix harmless accumulator not found
 exceptions

See [JIRA](https://issues.apache.org/jira/browse/SPARK-13344) for more detail. This was caused by #10835: `SparkFunSuite` cleared the global `Accumulators` registry after every test, so code that looked up an accumulator after a test's teardown hit harmless but noisy "accumulator not found" exceptions. This patch moves the registry cleanup to `afterAll`, clearing once per suite instead of once per test, and adds per-test clearing via `afterEach` only in `AccumulatorSuite` and `InternalAccumulatorSuite`, which register accumulators directly.
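
For suites that exercise accumulators directly, per-test cleanup now lives in the suite itself rather than in `SparkFunSuite`. A minimal sketch of the pattern this patch applies (the suite name is hypothetical; `SparkFunSuite` and `Accumulators` are the existing classes touched below):

```scala
package org.apache.spark

import org.scalatest.BeforeAndAfterEach

// Hypothetical example suite; not part of this patch.
class MyAccumulatorSuite extends SparkFunSuite with BeforeAndAfterEach {

  override def afterEach(): Unit = {
    try {
      // Drop all registered accumulators so state does not leak into the next test.
      Accumulators.clear()
    } finally {
      // Always run the parent teardown, even if clear() throws.
      super.afterEach()
    }
  }
}
```

The try/finally shape mirrors the suite-level `afterAll` override in `SparkFunSuite`: cleanup runs first, and the superclass teardown is guaranteed to run even if clearing fails.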

Author: Andrew Or <andrew@databricks.com>

Closes #11222 from andrewor14/fix-test-accum-exceptions.
---
 .../org/apache/spark/AccumulatorSuite.scala    |  8 ++++++++
 .../spark/InternalAccumulatorSuite.scala       |  8 ++++++++
 .../scala/org/apache/spark/SparkFunSuite.scala | 18 ++++++++++++++----
 3 files changed, 30 insertions(+), 4 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
index 4d49fe5159..8acd0439b6 100644
--- a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
@@ -34,6 +34,14 @@ import org.apache.spark.serializer.JavaSerializer
 class AccumulatorSuite extends SparkFunSuite with Matchers with LocalSparkContext {
   import AccumulatorParam._
 
+  override def afterEach(): Unit = {
+    try {
+      Accumulators.clear()
+    } finally {
+      super.afterEach()
+    }
+  }
+
   implicit def setAccum[A]: AccumulableParam[mutable.Set[A], A] =
     new AccumulableParam[mutable.Set[A], A] {
       def addInPlace(t1: mutable.Set[A], t2: mutable.Set[A]) : mutable.Set[A] = {
diff --git a/core/src/test/scala/org/apache/spark/InternalAccumulatorSuite.scala b/core/src/test/scala/org/apache/spark/InternalAccumulatorSuite.scala
index c426bb7a4e..474550608b 100644
--- a/core/src/test/scala/org/apache/spark/InternalAccumulatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/InternalAccumulatorSuite.scala
@@ -28,6 +28,14 @@ class InternalAccumulatorSuite extends SparkFunSuite with LocalSparkContext {
   import InternalAccumulator._
   import AccumulatorParam._
 
+  override def afterEach(): Unit = {
+    try {
+      Accumulators.clear()
+    } finally {
+      super.afterEach()
+    }
+  }
+
   test("get param") {
     assert(getParam(EXECUTOR_DESERIALIZE_TIME) === LongAccumulatorParam)
     assert(getParam(EXECUTOR_RUN_TIME) === LongAccumulatorParam)
diff --git a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
index d3359c7406..99366a32c4 100644
--- a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
@@ -18,14 +18,26 @@
 package org.apache.spark
 
 // scalastyle:off
-import org.scalatest.{FunSuite, Outcome}
+import org.scalatest.{BeforeAndAfterAll, FunSuite, Outcome}
 
 /**
  * Base abstract class for all unit tests in Spark for handling common functionality.
  */
-private[spark] abstract class SparkFunSuite extends FunSuite with Logging {
+private[spark] abstract class SparkFunSuite
+  extends FunSuite
+  with BeforeAndAfterAll
+  with Logging {
 // scalastyle:on
 
+  protected override def afterAll(): Unit = {
+    try {
+      // Avoid leaking map entries in tests that use accumulators without SparkContext
+      Accumulators.clear()
+    } finally {
+      super.afterAll()
+    }
+  }
+
   /**
    * Log the suite name and the test name before and after each test.
    *
@@ -42,8 +54,6 @@ private[spark] abstract class SparkFunSuite extends FunSuite with Logging {
       test()
     } finally {
       logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
-      // Avoid leaking map entries in tests that use accumulators without SparkContext
-      Accumulators.clear()
     }
   }
 
-- 
GitLab