From e3eb03d5b54db894670376b9c01a0c6c61aae083 Mon Sep 17 00:00:00 2001
From: "Joseph E. Gonzalez" <joseph.e.gonzalez@gmail.com>
Date: Tue, 22 Oct 2013 15:03:16 -0700
Subject: [PATCH] Starting analytics test suite.

---
 .../apache/spark/graph/AnalyticsSuite.scala   | 30 +++++++++++++++++++
 1 file changed, 30 insertions(+)
 create mode 100644 graph/src/test/scala/org/apache/spark/graph/AnalyticsSuite.scala

diff --git a/graph/src/test/scala/org/apache/spark/graph/AnalyticsSuite.scala b/graph/src/test/scala/org/apache/spark/graph/AnalyticsSuite.scala
new file mode 100644
index 0000000000..864d51e3f6
--- /dev/null
+++ b/graph/src/test/scala/org/apache/spark/graph/AnalyticsSuite.scala
@@ -0,0 +1,30 @@
+package org.apache.spark.graph
+
+import org.scalatest.FunSuite
+
+import org.apache.spark.SparkContext
+import org.apache.spark.graph.LocalSparkContext._
+import org.apache.spark.graph.util.GraphGenerators
+import org.apache.spark.graph.Analytics
+
+
+class AnalyticsSuite extends FunSuite with LocalSparkContext {
+
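+  // Use Kryo serialization with the graph-specific registrator for all tests in this suite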
+  System.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
+  System.setProperty("spark.kryo.registrator", "org.apache.spark.graph.GraphKryoRegistrator")
+
+  val sc = new SparkContext("local", "test")
+
+  test("Fixed Iterations PageRank") {
+    val starGraph = GraphGenerators.starGraph(sc, 1000)
+    val resetProb = 0.15
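+    // Run PageRank for one and for two fixed iterations on the same graph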
+    val prGraph1 = Analytics.pagerank(starGraph, 1, resetProb)
+    val prGraph2 = Analytics.pagerank(starGraph, 2, resetProb)
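+    // Count the vertices whose PageRank differs between the two runs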
+    val errors = prGraph1.vertices.zipJoin(prGraph2.vertices)
+      .map{ case (vid, (pr1, pr2)) => if (pr1 != pr2) { 1 } else { 0 } }.sum
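+    // PageRank on the star graph is expected to converge almost immediately, so both runs should agree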
+    assert(errors === 0)
+  }
+
+
+} // end of AnalyticsSuite
-- 
GitLab