From 244cbbe33a3f1e2566cde322eef2a02a11d35096 Mon Sep 17 00:00:00 2001
From: Imran Rashid <imran@quantifind.com>
Date: Mon, 16 Jul 2012 18:26:48 -0700
Subject: [PATCH] one more minor cleanup to scaladoc

---
 core/src/main/scala/spark/Accumulators.scala | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/core/src/main/scala/spark/Accumulators.scala b/core/src/main/scala/spark/Accumulators.scala
index 16e3657898..e63651fcb0 100644
--- a/core/src/main/scala/spark/Accumulators.scala
+++ b/core/src/main/scala/spark/Accumulators.scala
@@ -25,7 +25,7 @@ class Accumulable[T,R] (
 
   /**
    * merge two accumulable objects together
-   * <p>
+   *
    * Normally, a user will not want to use this version, but will instead call `+=`.
    * @param term the other Accumulable that will get merged with this
    */
@@ -64,7 +64,7 @@ trait AccumulatorParam[T] extends AccumulableParam[T,T] {
 
 /**
  * A datatype that can be accumulated, ie. has a commutative & associative +.
- * 
+ *
  * You must define how to add data, and how to merge two of these together. For some datatypes, these might be
  * the same operation (eg., a counter). In that case, you might want to use [[spark.AccumulatorParam]]. They won't
  * always be the same, though -- eg., imagine you are accumulating a set. You will add items to the set, and you
-- 
GitLab