diff --git a/core/src/main/scala/spark/Accumulators.scala b/core/src/main/scala/spark/Accumulators.scala
index bf774178526142976da87b035bd9574b0fbca2c5..4f4b515e6496a77a8387ec2aff36702b3b1c0bd6 100644
--- a/core/src/main/scala/spark/Accumulators.scala
+++ b/core/src/main/scala/spark/Accumulators.scala
@@ -35,7 +35,24 @@ class Accumulable[T,R] (
     else throw new UnsupportedOperationException("Can't use read value in task")
   }
 
-  private[spark] def localValue = value_
+  /**
+   * Get the current value of this accumulator from within a task.
+   *
+   * This is NOT the global value of the accumulator.  To get the global value after a
+   * completed operation on the dataset, call `value`.
+   *
+   * The typical use of this method is to mutate the local value directly, e.g., to add
+   * an element to a Set.
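+   *
+   * A sketch of the intended pattern (assuming a `SparkContext` named `sc`, an RDD
+   * `data`, and an implicit `AccumulableParam` for `mutable.Set` in scope):
+   * {{{
+   *   val acc: Accumulable[mutable.Set[Any], Any] = sc.accumulable(new mutable.HashSet[Any]())
+   *   data.foreach { x => acc.localValue += x }
+   *   // after the action completes, read the merged set on the driver via acc.value
+   * }}}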
+   */
+  def localValue = value_
 
   def value_= (t: T) {
     if (!deserialized) value_ = t
diff --git a/core/src/test/scala/spark/AccumulatorSuite.scala b/core/src/test/scala/spark/AccumulatorSuite.scala
index a59b77fc857d12197b469f7d6cefc6601dbf5842..8d27cfe0e2c84d26e69b4b84faeb6968999803bc 100644
--- a/core/src/test/scala/spark/AccumulatorSuite.scala
+++ b/core/src/test/scala/spark/AccumulatorSuite.scala
@@ -79,4 +79,20 @@ class AccumulatorSuite extends FunSuite with ShouldMatchers {
     }
   }
 
+  test ("localValue readable in tasks") {
+    import SetAccum._
+    val maxI = 1000
+    for (nThreads <- List(1, 10)) { // test single- & multi-threaded
+      val sc = new SparkContext("local[" + nThreads + "]", "test")
+      val acc: Accumulable[mutable.Set[Any], Any] = sc.accumulable(new mutable.HashSet[Any]())
+      val groupedInts = (1 to (maxI / 20)).map { x => (20 * (x - 1) to 20 * x).toSet }
+      val d = sc.parallelize(groupedInts)
+      d.foreach {
+        x => acc.localValue ++= x
+      }
+      acc.value should be ((0 to maxI).toSet)
+      sc.stop()  // shut down this context before the next iteration creates another
+    }
+  }
+
 }
\ No newline at end of file