diff --git a/core/src/main/scala/spark/CacheTracker.scala b/core/src/main/scala/spark/CacheTracker.scala
index 6826c7897cae6a907ac17b88c117d2e44a336bac..223c5dc5f7d555efc9966190ea24b8dce88627e8 100644
--- a/core/src/main/scala/spark/CacheTracker.scala
+++ b/core/src/main/scala/spark/CacheTracker.scala
@@ -96,7 +96,7 @@ class CacheTracker(isMaster: Boolean, theCache: Cache) extends Logging {
   // Get a snapshot of the currently known locations
   def getLocationsSnapshot(): HashMap[Int, Array[List[String]]] = {
     (trackerActor !? GetCacheLocations) match {
-      case h: HashMap[Int, Array[List[String]]] => h
+      case h: HashMap[_, _] => h.asInstanceOf[HashMap[Int, Array[List[String]]]]
       case _ => throw new SparkException(
           "Internal error: CacheTrackerActor did not reply with a HashMap")
     }
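
Both Scala changes in this commit silence "unchecked" warnings that stem from type erasure: the JVM discards type arguments at runtime, so a pattern like `case h: HashMap[Int, Array[List[String]]]` can only test for a bare `HashMap`, and the compiler cannot verify the rest. As a minimal, self-contained sketch of the problem and the fix (names hypothetical, not Spark's API):

```scala
import scala.collection.mutable.HashMap

object ErasureDemo {
  def main(args: Array[String]): Unit = {
    // Stand-in for the untyped reply that the actor's `!?` returns.
    val reply: Any = HashMap(1 -> Array(List("host1", "host2")))

    reply match {
      // Only the erased HashMap class is checkable at runtime; matching
      // on the wildcard shape and casting makes that assumption explicit
      // instead of hiding it in an unchecked pattern.
      case h: HashMap[_, _] =>
        val typed = h.asInstanceOf[HashMap[Int, Array[List[String]]]]
        println(typed(1).head.mkString(","))
      case _ =>
        println("unexpected reply")
    }
  }
}
```
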
diff --git a/core/src/main/scala/spark/CoGroupedRDD.scala b/core/src/main/scala/spark/CoGroupedRDD.scala
index ea9e2d38a9a6b083f18c8084be52217842870d0a..101c8c99d8f41b26902460a83ed14d948c6574a0 100644
--- a/core/src/main/scala/spark/CoGroupedRDD.scala
+++ b/core/src/main/scala/spark/CoGroupedRDD.scala
@@ -75,8 +75,8 @@ extends RDD[(K, Seq[Seq[_]])](rdds.head.context) with Logging {
     for ((dep, depNum) <- split.deps.zipWithIndex) dep match {
       case NarrowCoGroupSplitDep(rdd, itsSplit) => {
         // Read them from the parent
-        for ((k: K, v) <- rdd.iterator(itsSplit)) {
-          getSeq(k)(depNum) += v
+        for ((k, v) <- rdd.iterator(itsSplit)) {
+          getSeq(k.asInstanceOf[K])(depNum) += v
         }
       }
       case ShuffleCoGroupSplitDep(shuffleId) => {
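
The same erasure issue applies to the pattern `(k: K, v)` in the for-comprehension: `K` is a type parameter of the enclosing class and is gone at runtime, so the type ascription was unchecked. A minimal sketch of the cast-based replacement (hypothetical `Grouper`, not Spark's API):

```scala
// The type parameter K is erased, so `for ((k: K, v) <- ...)` cannot be
// verified at runtime; binding untyped and casting keeps the behavior
// while making the assumption visible, as in the CoGroupedRDD fix.
class Grouper[K] {
  def group(pairs: Iterator[(Any, Any)]): Map[K, List[Any]] = {
    var groups = Map.empty[K, List[Any]]
    for ((k, v) <- pairs) {
      val key = k.asInstanceOf[K]
      groups = groups.updated(key, v :: groups.getOrElse(key, Nil))
    }
    groups
  }
}
```
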
diff --git a/project/build/SparkProject.scala b/project/build/SparkProject.scala
index a6520d1f03bf7ab957ec491bfe5f5b426372b86a..634acab7026c6a74c198e069a90c9e6cdf21b624 100644
--- a/project/build/SparkProject.scala
+++ b/project/build/SparkProject.scala
@@ -6,27 +6,24 @@ import assembly._
 import de.element34.sbteclipsify._
 
 
-class SparkProject(info: ProjectInfo)
-extends ParentProject(info) with IdeaProject
-{
+class SparkProject(info: ProjectInfo) extends ParentProject(info) with IdeaProject {
+
   lazy val core = project("core", "Spark Core", new CoreProject(_))
 
-  lazy val examples =
-    project("examples", "Spark Examples", new ExamplesProject(_), core)
+  lazy val examples = project("examples", "Spark Examples", new ExamplesProject(_), core)
 
   lazy val bagel = project("bagel", "Bagel", new BagelProject(_), core)
 
-  class CoreProject(info: ProjectInfo)
-  extends DefaultProject(info) with Eclipsify with IdeaProject with DepJar with XmlTestReport
-  {}
+  trait BaseProject extends BasicScalaProject with ScalaPaths with Eclipsify with IdeaProject {
+    override def compileOptions = super.compileOptions ++ Seq(Unchecked)
+  }
+
+  class CoreProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject with DepJar with XmlTestReport
 
-  class ExamplesProject(info: ProjectInfo)
-  extends DefaultProject(info) with Eclipsify with IdeaProject
-  {}
+  class ExamplesProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject
 
-  class BagelProject(info: ProjectInfo)
-  extends DefaultProject(info) with DepJar with XmlTestReport
-  {}
+  class BagelProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject with DepJar with XmlTestReport
+
 }
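
The build-file change factors the mixins shared by the subprojects into a `BaseProject` trait and turns on scalac's `-unchecked` flag (sbt 0.7's `Unchecked` compile option), which is what surfaces erased-type patterns like the two fixed above. A minimal sketch of the same shared-trait pattern (project names hypothetical):

```scala
import sbt._

class DemoProject(info: ProjectInfo) extends ParentProject(info) {

  // Shared settings trait: common mixins and compiler flags live here,
  // so each subproject declares only its own extras.
  trait SharedSettings extends BasicScalaProject {
    // -unchecked reports pattern matches the compiler cannot verify.
    override def compileOptions = super.compileOptions ++ Seq(Unchecked)
  }

  lazy val core = project("core", "Demo Core", new CoreProject(_))

  class CoreProject(info: ProjectInfo) extends DefaultProject(info) with SharedSettings
}
```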