diff --git a/examples/src/main/python/kmeans.py b/examples/src/main/python/kmeans.py
index 19391506463f09a8e8649217930617c780f10044..1456c8731284159f4934b47862a459e60d33a473 100755
--- a/examples/src/main/python/kmeans.py
+++ b/examples/src/main/python/kmeans.py
@@ -68,14 +68,14 @@ if __name__ == "__main__":
         closest = data.map(
             lambda p: (closestPoint(p, kPoints), (p, 1)))
         pointStats = closest.reduceByKey(
-            lambda (x1, y1), (x2, y2): (x1 + x2, y1 + y2))
+            lambda (p1, c1), (p2, c2): (p1 + p2, c1 + c2))
         newPoints = pointStats.map(
-            lambda xy: (xy[0], xy[1][0] / xy[1][1])).collect()
+            lambda st: (st[0], st[1][0] / st[1][1])).collect()
 
-        tempDist = sum(np.sum((kPoints[x] - y) ** 2) for (x, y) in newPoints)
+        tempDist = sum(np.sum((kPoints[iK] - p) ** 2) for (iK, p) in newPoints)
 
-        for (x, y) in newPoints:
-            kPoints[x] = y
+        for (iK, p) in newPoints:
+            kPoints[iK] = p
 
     print("Final centers: " + str(kPoints))
 
diff --git a/examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala b/examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala
index f73eac1e2b906d5be72d897b0e742f0f5876525d..04fc0a033014afd885562bfaf7765d2c3df79778 100644
--- a/examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala
@@ -99,7 +99,7 @@ object LocalKMeans {
 
       var pointStats = mappings.map { pair =>
         pair._2.reduceLeft [(Int, (Vector[Double], Int))] {
-          case ((id1, (x1, y1)), (id2, (x2, y2))) => (id1, (x1 + x2, y1 + y2))
+          case ((id1, (p1, c1)), (id2, (p2, c2))) => (id1, (p1 + p2, c1 + c2))
         }
       }
 
diff --git a/examples/src/main/scala/org/apache/spark/examples/SparkKMeans.scala b/examples/src/main/scala/org/apache/spark/examples/SparkKMeans.scala
index 48e8d11cdf95bbacb60862e553beeb4a25665371..b514d9123f5e770268bbf2f7b31f696789ca4d55 100644
--- a/examples/src/main/scala/org/apache/spark/examples/SparkKMeans.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkKMeans.scala
@@ -79,7 +79,7 @@ object SparkKMeans {
     while(tempDist > convergeDist) {
       val closest = data.map (p => (closestPoint(p, kPoints), (p, 1)))
 
-      val pointStats = closest.reduceByKey{case ((x1, y1), (x2, y2)) => (x1 + x2, y1 + y2)}
+      val pointStats = closest.reduceByKey{case ((p1, c1), (p2, c2)) => (p1 + p2, c1 + c2)}
 
       val newPoints = pointStats.map {pair =>
         (pair._1, pair._2._1 * (1.0 / pair._2._2))}.collectAsMap()
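
The SparkKMeans hunk makes the same point/count distinction in the `reduceByKey`; the surrounding loop, visible in the context lines, keeps iterating while `tempDist`, the total squared movement of the centers, exceeds `convergeDist`. A compact local sketch of that convergence loop, with made-up data and threshold:

```python
import numpy as np

# Made-up data, initial centers, and convergence threshold.
data = np.array([[0.0, 0.1], [0.2, 0.0], [5.0, 5.1], [4.8, 5.0]])
k_points = [data[0].copy(), data[2].copy()]
converge_dist = 1e-6

temp_dist = float("inf")
while temp_dist > converge_dist:
    # Assign every point to its closest current center.
    labels = [int(np.argmin([np.sum((p - c) ** 2) for c in k_points]))
              for p in data]
    # Per-cluster (point sum, count) -> new center = sum / count.
    new_points = []
    for i_k in range(len(k_points)):
        members = [p for p, lbl in zip(data, labels) if lbl == i_k]
        if members:
            new_points.append((i_k, sum(members) / len(members)))
    # Total squared distance the centers moved this pass.
    temp_dist = sum(np.sum((k_points[i_k] - p) ** 2) for i_k, p in new_points)
    for i_k, p in new_points:
        k_points[i_k] = p

print("Final centers: " + str(k_points))
```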