From 8d1ef7f2dfc453137b8dbbb72a7f1ca93b57bb15 Mon Sep 17 00:00:00 2001
From: jerryshao <saisai.shao@intel.com>
Date: Thu, 4 Jul 2013 10:11:30 +0800
Subject: [PATCH] Code style changes

Tidy up the metrics code: document the commented-out ConsoleSink class
option in conf/metrics.properties.template, add the missing space in
MasterSource's {Gauge, MetricRegistry} import and fix the gauge's
indentation, switch MetricsConfig to the qualified mutable.HashMap
import, and drop a stray blank line in ConsoleSink.
---
 conf/metrics.properties.template                          | 2 ++
 .../src/main/scala/spark/deploy/master/MasterSource.scala | 4 ++--
 core/src/main/scala/spark/metrics/MetricsConfig.scala     | 8 ++++----
 core/src/main/scala/spark/metrics/sink/ConsoleSink.scala  | 1 -
 4 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/conf/metrics.properties.template b/conf/metrics.properties.template
index b4204e1deb..c7e24aa36c 100644
--- a/conf/metrics.properties.template
+++ b/conf/metrics.properties.template
@@ -1,5 +1,7 @@
 # syntax: [instance].[sink|source].[name].[options]
 
+#*.sink.console.class=spark.metrics.sink.ConsoleSink
+
 #*.sink.console.period=10
 
 #*.sink.console.unit=second
diff --git a/core/src/main/scala/spark/deploy/master/MasterSource.scala b/core/src/main/scala/spark/deploy/master/MasterSource.scala
index f94e5b2c34..65c22320d6 100644
--- a/core/src/main/scala/spark/deploy/master/MasterSource.scala
+++ b/core/src/main/scala/spark/deploy/master/MasterSource.scala
@@ -1,6 +1,6 @@
 package spark.deploy.master
 
-import com.codahale.metrics.{Gauge,MetricRegistry}
+import com.codahale.metrics.{Gauge, MetricRegistry}
 
 import spark.metrics.source.Source
 
@@ -10,7 +10,7 @@ private[spark] class MasterSource(val master: Master) extends Source {
 
   // Gauge for worker numbers in cluster
   metricRegistry.register(MetricRegistry.name("workers","number"), new Gauge[Int] {
-      override def getValue: Int = master.workers.size
+    override def getValue: Int = master.workers.size
   })
 
   // Gauge for application numbers in cluster
diff --git a/core/src/main/scala/spark/metrics/MetricsConfig.scala b/core/src/main/scala/spark/metrics/MetricsConfig.scala
index b1f6a1e596..5066b7ac22 100644
--- a/core/src/main/scala/spark/metrics/MetricsConfig.scala
+++ b/core/src/main/scala/spark/metrics/MetricsConfig.scala
@@ -3,14 +3,14 @@ package spark.metrics
 import java.util.Properties
 import java.io.{File, FileInputStream}
 
-import scala.collection.mutable.HashMap
+import scala.collection.mutable
 import scala.util.matching.Regex
 
 private[spark] class MetricsConfig(val configFile: String) {
   val properties = new Properties()
   val DEFAULT_PREFIX = "*"
   val INSTANCE_REGEX = "^(\\*|[a-zA-Z]+)\\.(.+)".r
-  var propertyCategories: HashMap[String, Properties] = null
+  var propertyCategories: mutable.HashMap[String, Properties] = null
 
   private def setDefaultProperties(prop: Properties) {
     prop.setProperty("*.sink.jmx.enabled", "default")
@@ -43,8 +43,8 @@ private[spark] class MetricsConfig(val configFile: String) {
     }
   }
 
-  def subProperties(prop: Properties, regex: Regex): HashMap[String, Properties] = {
-    val subProperties = new HashMap[String, Properties]
+  def subProperties(prop: Properties, regex: Regex): mutable.HashMap[String, Properties] = {
+    val subProperties = new mutable.HashMap[String, Properties]
     import scala.collection.JavaConversions._
     prop.foreach { kv =>
       if (regex.findPrefixOf(kv._1) != None) {
diff --git a/core/src/main/scala/spark/metrics/sink/ConsoleSink.scala b/core/src/main/scala/spark/metrics/sink/ConsoleSink.scala
index c67c0ee912..437f24a575 100644
--- a/core/src/main/scala/spark/metrics/sink/ConsoleSink.scala
+++ b/core/src/main/scala/spark/metrics/sink/ConsoleSink.scala
@@ -8,7 +8,6 @@ import java.util.concurrent.TimeUnit
 import spark.metrics.MetricsSystem
 
 class ConsoleSink(val property: Properties, val registry: MetricRegistry) extends Sink {
-
   val CONSOLE_DEFAULT_PERIOD = "10"
   val CONSOLE_DEFAULT_UNIT = "second"
 
-- 
GitLab