diff --git a/conf/metrics.properties.template b/conf/metrics.properties.template
index b4204e1deb77cd449ed77ddd5ae44c926160957d..c7e24aa36cb800ad6cc446fa561368dcea1ea3bd 100644
--- a/conf/metrics.properties.template
+++ b/conf/metrics.properties.template
@@ -1,5 +1,7 @@
 # syntax: [instance].[sink|source].[name].[options]
 
+#*.sink.console.class=spark.metrics.sink.ConsoleSink
+
 #*.sink.console.period=10
 
 #*.sink.console.unit=second
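
The added line documents the sink's fully-qualified class name next to the existing period and unit examples. A minimal sketch of what an enabled console sink looks like once all three keys are uncommented (the values are the template's own examples):

    *.sink.console.class=spark.metrics.sink.ConsoleSink
    *.sink.console.period=10
    *.sink.console.unit=second
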
diff --git a/core/src/main/scala/spark/deploy/master/MasterSource.scala b/core/src/main/scala/spark/deploy/master/MasterSource.scala
index f94e5b2c342397aed7ee3ebb44f5d6e52d930b1b..65c22320d600ae08ff9b0e8cb9454ae61025156e 100644
--- a/core/src/main/scala/spark/deploy/master/MasterSource.scala
+++ b/core/src/main/scala/spark/deploy/master/MasterSource.scala
@@ -1,6 +1,6 @@
 package spark.deploy.master
 
-import com.codahale.metrics.{Gauge,MetricRegistry}
+import com.codahale.metrics.{Gauge, MetricRegistry}
 
 import spark.metrics.source.Source
 
@@ -10,7 +10,7 @@ private[spark] class MasterSource(val master: Master) extends Source {
 
   // Gauge for worker numbers in cluster
   metricRegistry.register(MetricRegistry.name("workers","number"), new Gauge[Int] {
-      override def getValue: Int = master.workers.size
+    override def getValue: Int = master.workers.size
   })
 
   // Gauge for application numbers in cluster
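
Both hunks in this file are formatting-only: a space after the comma in the import list, and two-space indentation for the Gauge body. A self-contained sketch of the corrected style, using a hypothetical ExampleSource rather than the real MasterSource:

    import com.codahale.metrics.{Gauge, MetricRegistry}

    // Hypothetical source mirroring MasterSource's corrected formatting.
    class ExampleSource(val items: Seq[String]) {
      val metricRegistry = new MetricRegistry()

      // Gauge for the number of tracked items
      metricRegistry.register(MetricRegistry.name("items", "number"), new Gauge[Int] {
        override def getValue: Int = items.size
      })
    }
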
diff --git a/core/src/main/scala/spark/metrics/MetricsConfig.scala b/core/src/main/scala/spark/metrics/MetricsConfig.scala
index b1f6a1e596e76a27ba624a61ea9fce3fffae8f64..5066b7ac22cbc0fee79b5fba1a70f0dff4ae783b 100644
--- a/core/src/main/scala/spark/metrics/MetricsConfig.scala
+++ b/core/src/main/scala/spark/metrics/MetricsConfig.scala
@@ -3,14 +3,14 @@ package spark.metrics
 import java.util.Properties
 import java.io.{File, FileInputStream}
 
-import scala.collection.mutable.HashMap
+import scala.collection.mutable
 import scala.util.matching.Regex
 
 private[spark] class MetricsConfig(val configFile: String) {
   val properties = new Properties()
   val DEFAULT_PREFIX = "*"
   val INSTANCE_REGEX = "^(\\*|[a-zA-Z]+)\\.(.+)".r
-  var propertyCategories: HashMap[String, Properties] = null
+  var propertyCategories: mutable.HashMap[String, Properties] = null
 
   private def setDefaultProperties(prop: Properties) {
     prop.setProperty("*.sink.jmx.enabled", "default")
@@ -43,8 +43,8 @@ private[spark] class MetricsConfig(val configFile: String) {
     }
   }
 
-  def subProperties(prop: Properties, regex: Regex): HashMap[String, Properties] = {
-    val subProperties = new HashMap[String, Properties]
+  def subProperties(prop: Properties, regex: Regex): mutable.HashMap[String, Properties] = {
+    val subProperties = new mutable.HashMap[String, Properties]
     import scala.collection.JavaConversions._
     prop.foreach { kv =>
       if (regex.findPrefixOf(kv._1) != None) {
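
Importing scala.collection.mutable and qualifying each use as mutable.HashMap makes the mutability explicit at every use site. A hedged, self-contained sketch of how subProperties buckets keys by instance prefix; the sample keys and the getOrElseUpdate body are assumptions, since the hunk ends before the match is consumed:

    import java.util.Properties

    import scala.collection.mutable
    import scala.util.matching.Regex

    object SubPropertiesSketch {
      // Same pattern as INSTANCE_REGEX above: capture the instance
      // ("*" or a word) and the remainder of the key.
      val INSTANCE_REGEX: Regex = "^(\\*|[a-zA-Z]+)\\.(.+)".r

      // Assumed behavior: one Properties bucket per instance prefix.
      def subProperties(prop: Properties, regex: Regex): mutable.HashMap[String, Properties] = {
        val subProperties = new mutable.HashMap[String, Properties]
        import scala.collection.JavaConversions._
        prop.foreach { kv =>
          regex.findPrefixMatchOf(kv._1).foreach { m =>
            subProperties.getOrElseUpdate(m.group(1), new Properties)
              .setProperty(m.group(2), kv._2)
          }
        }
        subProperties
      }

      def main(args: Array[String]) {
        val prop = new Properties
        prop.setProperty("*.sink.console.period", "10")
        prop.setProperty("master.sink.console.unit", "second")
        // Expect two buckets: "*" and "master".
        subProperties(prop, INSTANCE_REGEX).foreach(println)
      }
    }
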
diff --git a/core/src/main/scala/spark/metrics/sink/ConsoleSink.scala b/core/src/main/scala/spark/metrics/sink/ConsoleSink.scala
index c67c0ee9123fee9a540fea06c57d509a9404b619..437f24a5759cc03a48fffab391132afe25c8829b 100644
--- a/core/src/main/scala/spark/metrics/sink/ConsoleSink.scala
+++ b/core/src/main/scala/spark/metrics/sink/ConsoleSink.scala
@@ -8,7 +8,6 @@ import java.util.concurrent.TimeUnit
 import spark.metrics.MetricsSystem
 
 class ConsoleSink(val property: Properties, val registry: MetricRegistry) extends Sink {
-
   val CONSOLE_DEFAULT_PERIOD = "10"
   val CONSOLE_DEFAULT_UNIT = "second"
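
The removed blank line tightens the class header. For context, a hedged sketch of how a console sink typically wires these defaults into Codahale's ConsoleReporter; the "period" and "unit" property keys and the unit mapping are assumptions, not the file's actual code (the hunk is truncated here):

    import java.util.Properties
    import java.util.concurrent.TimeUnit

    import com.codahale.metrics.{ConsoleReporter, MetricRegistry}

    // Assumed wiring: read period/unit, falling back to the defaults
    // above, then poll a ConsoleReporter at that rate.
    class ConsoleSinkSketch(property: Properties, registry: MetricRegistry) {
      val pollPeriod = property.getProperty("period", "10").toLong
      val pollUnit = property.getProperty("unit", "second") match {
        case "second" => TimeUnit.SECONDS
        case "minute" => TimeUnit.MINUTES
        case other    => TimeUnit.valueOf(other.toUpperCase)
      }

      val reporter = ConsoleReporter.forRegistry(registry)
        .convertRatesTo(TimeUnit.SECONDS)
        .convertDurationsTo(TimeUnit.MILLISECONDS)
        .build()

      def start() {
        reporter.start(pollPeriod, pollUnit)
      }
    }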