From d8cf047d35f61c75cccaedcf978bc01f1c2fd52f Mon Sep 17 00:00:00 2001
From: Stephen Skeirik <skeirik2@illinois.edu>
Date: Sat, 5 May 2018 16:38:33 -0500
Subject: [PATCH] Update files to pass Scalastyle checks

---
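Note: these hunks change no behavior; they only bring the earlier
instrumentation changes (the "// SS { ... }" blocks) in line with
Scalastyle: the 100-character line-length limit, a required space after
"//" in line comments, and spaces after commas in argument lists. To
re-check locally, assuming this fork keeps Spark's standard dev
scripts, run:

    ./dev/scalastyle
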
 .../apache/spark/executor/CoarseGrainedExecutorBackend.scala  | 4 +++-
 core/src/main/scala/org/apache/spark/executor/Executor.scala  | 3 ++-
 core/src/main/scala/org/apache/spark/rdd/RDD.scala            | 2 +-
 .../main/scala/org/apache/spark/scheduler/DAGScheduler.scala  | 3 ++-
 .../main/scala/org/apache/spark/scheduler/ResultTask.scala    | 2 +-
 .../main/scala/org/apache/spark/scheduler/StageExInfo.scala   | 4 +---
 6 files changed, 10 insertions(+), 8 deletions(-)
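
For reference, a minimal sketch of the wrapping style applied below,
with hypothetical values rather than code from this patch: an
expression that would overrun 100 columns is broken after a comma, and
the continuation line gets two extra spaces of indentation.

    // Hypothetical stand-in for the taskDesc.name parsing in the first
    // hunk: extract the stage id between the last space and last dot.
    val name = "task 0.0 in stage 7.0"
    val stageId =
      name.substring(name.lastIndexOf(' ') + 1,
        name.lastIndexOf('.')).toInt  // yields 7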

diff --git a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
index 852666260e..9214fa8cff 100644
--- a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
@@ -96,7 +96,9 @@ private[spark] class CoarseGrainedExecutorBackend(
         val taskDesc = TaskDescription.decode(data.value)
         logInfo("Got assigned task " + taskDesc.taskId)
         // SS {
-        val currentStageId = taskDesc.name.substring(taskDesc.name.lastIndexOf(' ') + 1, taskDesc.name.lastIndexOf('.')).toInt
+        val currentStageId =
+          taskDesc.name.substring(taskDesc.name.lastIndexOf(' ') + 1,
+            taskDesc.name.lastIndexOf('.')).toInt
         // logEarne("Current in Stage: " + currentStageId)
         env.currentStage = currentStageId
         env.blockManager.currentStage = currentStageId
diff --git a/core/src/main/scala/org/apache/spark/executor/Executor.scala b/core/src/main/scala/org/apache/spark/executor/Executor.scala
index 5738d15627..0def96f8cd 100644
--- a/core/src/main/scala/org/apache/spark/executor/Executor.scala
+++ b/core/src/main/scala/org/apache/spark/executor/Executor.scala
@@ -327,7 +327,8 @@ private[spark] class Executor(
         }
 
         // SS {
-        // logEarne("Task " + taskId + " is in Stage " + task.stageId + " and the depMap is " + task.depMap)
+        // logEarne("Task " + taskId + " is in Stage " + task.stageId +
+        //   " and the depMap is " + task.depMap)
         if (!env.blockManager.stageExInfos.contains(task.stageId)) {
           env.blockManager.stageExInfos.put(task.stageId,
             new StageExInfo(task.stageId, null, null, task.depMap, task.curRunningRddMap))
diff --git a/core/src/main/scala/org/apache/spark/rdd/RDD.scala b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
index 9f3e3273f9..2ea1a3318b 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
@@ -392,7 +392,7 @@ abstract class RDD[T: ClassTag](
           blockManager.blockExInfo.get(key).creatStartTime = System.currentTimeMillis()
         }
       case None =>
-        //logEarne("Some Thing Wrong")
+        // logEarne("Some Thing Wrong")
     }
     // SS }
     // This method is called on executors, so we need call SparkEnv.get instead of sc.env.
diff --git a/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
index 77bed0cc62..9f3cc3c48c 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
@@ -1128,7 +1128,8 @@ class DAGScheduler(
             // SS {
             new ResultTask(stage.id, stage.latestInfo.attemptNumber,
               taskBinary, part, locs, id, properties, serializedTaskMetrics,
-              Option(jobId), Option(sc.applicationId), sc.applicationAttemptId, depMap, curRunningRddMap)
+              Option(jobId), Option(sc.applicationId), sc.applicationAttemptId,
+              depMap, curRunningRddMap)
             // SS }
           }
       }
diff --git a/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala b/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
index 12f83c83fc..7f0219af4b 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
@@ -64,7 +64,7 @@ private[spark] class ResultTask[T, U](
     depMap: HashMap[Int, Set[Int]] = null,
     curRunningRddMap: HashMap[Int, Set[Int]] = null)
   extends Task[U](stageId, stageAttemptId, partition.index, localProperties, serializedTaskMetrics,
-    jobId, appId, appAttemptId,depMap,curRunningRddMap)
+    jobId, appId, appAttemptId, depMap, curRunningRddMap)
   with Serializable {
 
   @transient private[this] val preferredLocs: Seq[TaskLocation] = {
diff --git a/core/src/main/scala/org/apache/spark/scheduler/StageExInfo.scala b/core/src/main/scala/org/apache/spark/scheduler/StageExInfo.scala
index 11b3692ec1..90389a45fc 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/StageExInfo.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/StageExInfo.scala
@@ -19,9 +19,7 @@ package org.apache.spark.storage
 
 import scala.collection.mutable
 
-/**
-  * DS to store info of a stage.
-  */
+// Data structure storing execution info for a stage.
 class StageExInfo(val stageId: Int,
                   val alreadyPerRddSet: Set[Int], // prs
                   val afterPerRddSet: Set[Int], // aprs
-- 
GitLab