diff --git a/core/src/main/scala/spark/SparkContext.scala b/core/src/main/scala/spark/SparkContext.scala
index eae71571dbbb032839a1297743025ca41d43929c..b43aca2b97facac3b68dd84355469027f3ed78dd 100644
--- a/core/src/main/scala/spark/SparkContext.scala
+++ b/core/src/main/scala/spark/SparkContext.scala
@@ -260,6 +260,7 @@ class SparkContext(
 
   // Stop the SparkContext
   def stop() {
+    remote.shutdownServerModule()
     dagScheduler.stop()
     dagScheduler = null
     taskScheduler = null
diff --git a/core/src/main/scala/spark/network/ConnectionManager.scala b/core/src/main/scala/spark/network/ConnectionManager.scala
index f0b942c49250c42fe50701166d94f912816ec27f..3222187990eaa630997d012a0babbcd1a61cbb20 100644
--- a/core/src/main/scala/spark/network/ConnectionManager.scala
+++ b/core/src/main/scala/spark/network/ConnectionManager.scala
@@ -334,14 +334,14 @@ class ConnectionManager(port: Int) extends Logging {
       selectorThread.interrupt()
       selectorThread.join()
       selector.close()
+      val connections = connectionsByKey.values
+      connections.foreach(_.close())
+      if (connectionsByKey.size != 0) {
+        logWarning("Not all connections have been cleaned up")
+      }
+      handleMessageExecutor.shutdown()
+      logInfo("ConnectionManager stopped")
     }
-    val connections = connectionsByKey.values
-    connections.foreach(_.close())
-    if (connectionsByKey.size != 0) {
-      logWarning("All connections not cleaned up")
-    }
-    handleMessageExecutor.shutdown()
-    logInfo("ConnectionManager stopped")
   }
 }
 
diff --git a/core/src/main/scala/spark/storage/BlockManager.scala b/core/src/main/scala/spark/storage/BlockManager.scala
index 999bbc2128c3adc76a498bf04fb87a53192cb456..9e4816f7ce1418c6bed93c82b74284546a102cb6 100644
--- a/core/src/main/scala/spark/storage/BlockManager.scala
+++ b/core/src/main/scala/spark/storage/BlockManager.scala
@@ -573,6 +573,7 @@ class BlockManager(maxMemory: Long, val serializer: Serializer) extends Logging
     blockInfo.clear()
     memoryStore.clear()
     diskStore.clear()
+    logInfo("BlockManager stopped")
   }
 }
 
diff --git a/core/src/main/scala/spark/storage/BlockManagerMaster.scala b/core/src/main/scala/spark/storage/BlockManagerMaster.scala
index 85edbbe0cd1ab92ce74a04bee525f0f34d672ecb..d8400a1f65bde55736dfdae9f8a19ab624615e85 100644
--- a/core/src/main/scala/spark/storage/BlockManagerMaster.scala
+++ b/core/src/main/scala/spark/storage/BlockManagerMaster.scala
@@ -352,7 +352,11 @@ object BlockManagerMaster extends Logging {
   }
   
   def stopBlockManagerMaster() {
-    if (masterActor != null) masterActor.stop()
+    if (masterActor != null) {
+      masterActor.stop()
+      masterActor = null
+      logInfo("BlockManagerMaster stopped")
+    }
   }
   
   def notifyADeadHost(host: String) {
diff --git a/core/src/main/scala/spark/storage/BlockStore.scala b/core/src/main/scala/spark/storage/BlockStore.scala
index 52f2cc32e8f8a6213ee11b305387fd8729360b9d..8672a5376ebd057eff95e9e5c5748c429da39296 100644
--- a/core/src/main/scala/spark/storage/BlockStore.scala
+++ b/core/src/main/scala/spark/storage/BlockStore.scala
@@ -125,6 +125,7 @@ class MemoryStore(blockManager: BlockManager, maxMemory: Long)
       memoryStore.clear()
     }
     blockDropper.shutdown()
+    logInfo("MemoryStore cleared")
   }
 
   private def drop(blockId: String) {
diff --git a/sbt/sbt b/sbt/sbt
index 714e3d15d7b50d06a84a84dd69352f5bad72bf53..fab996728686a59cee0c0d23619b986eb15066b9 100755
--- a/sbt/sbt
+++ b/sbt/sbt
@@ -4,4 +4,4 @@ if [ "$MESOS_HOME" != "" ]; then
   EXTRA_ARGS="-Djava.library.path=$MESOS_HOME/lib/java"
 fi
 export SPARK_HOME=$(cd "$(dirname $0)/.."; pwd)
-java -Xmx800M -XX:MaxPermSize=150m $EXTRA_ARGS -jar $SPARK_HOME/sbt/sbt-launch-*.jar "$@"
+java -Xmx1200M -XX:MaxPermSize=200m $EXTRA_ARGS -jar $SPARK_HOME/sbt/sbt-launch-*.jar "$@"