diff --git a/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala b/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
index a1d5e6cf62daa0a5847960b5243a75029cc79992..94f8b0128502cadc752fd7d01f4d17b090300f90 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
@@ -70,7 +70,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
   /**
    * Create a log file for one job
    * @param jobID ID of the job
-   * @return No return
    * @exception FileNotFoundException Fail to create log file
    */
   protected def createLogWriter(jobID: Int) {
@@ -85,7 +84,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
   /**
    * Close log file, and clean the stage relationship in stageIDToJobID
    * @param jobID ID of the job
-   * @return No return
    */
   protected def closeLogWriter(jobID: Int) {
     jobIDToPrintWriter.get(jobID).foreach { fileWriter =>
@@ -103,7 +101,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
    * @param jobID ID of the job
    * @param info Info to be recorded
    * @param withTime Controls whether to record time stamp before the info, default is true
-   * @return No return
    */
   protected def jobLogInfo(jobID: Int, info: String, withTime: Boolean = true) {
     var writeInfo = info
@@ -119,7 +116,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
    * @param stageID ID of the stage
    * @param info Info to be recorded
    * @param withTime Controls whether to record time stamp before the info, default is true
-   * @return No return
    */
   protected def stageLogInfo(stageID: Int, info: String, withTime: Boolean = true) {
     stageIDToJobID.get(stageID).foreach(jobID => jobLogInfo(jobID, info, withTime))
@@ -129,7 +125,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
    * Build stage dependency for a job
    * @param jobID ID of the job
    * @param stage Root stage of the job
-   * @return No return
    */
   protected def buildJobDep(jobID: Int, stage: Stage) {
     if (stage.jobId == jobID) {
@@ -147,7 +142,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
   /**
    * Record stage dependency and RDD dependency for a stage
    * @param jobID Job ID of the stage
-   * @return No return
    */
   protected def recordStageDep(jobID: Int) {
     def getRddsInStage(rdd: RDD[_]): ListBuffer[RDD[_]] = {
@@ -206,7 +200,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
    * @param jobID Job ID of the stage
    * @param rdd Root RDD of the stage
    * @param indent Indent number before info
-   * @return No return
    */
   protected def recordRddInStageGraph(jobID: Int, rdd: RDD[_], indent: Int) {
     val rddInfo =
@@ -231,9 +224,7 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
    * @param jobID Job ID of the stage
    * @param stage Root stage of the job
    * @param indent Indent number before info, default is 0
-   * @return No return
    */
-
   protected def recordStageDepGraph(jobID: Int, stage: Stage, idSet: HashSet[Int], indent: Int = 0) {
     val stageInfo = if (stage.isShuffleMap) {
       "STAGE_ID=" + stage.id + " MAP_STAGE SHUFFLE_ID=" + stage.shuffleDep.get.shuffleId
@@ -258,7 +249,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
    * @param status Status info of the task
    * @param taskInfo Task description info
    * @param taskMetrics Task running metrics
-   * @return No return
    */
   protected def recordTaskMetrics(stageID: Int, status: String, taskInfo: TaskInfo,
                                   taskMetrics: TaskMetrics) {
@@ -287,7 +277,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
   /**
    * When stage is submitted, record stage submit info
    * @param stageSubmitted Stage submitted event
-   * @return No return
    */
   override def onStageSubmitted(stageSubmitted: SparkListenerStageSubmitted) {
     stageLogInfo(stageSubmitted.stage.stageId,"STAGE_ID=%d STATUS=SUBMITTED TASK_SIZE=%d".format(
@@ -297,7 +286,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
   /**
    * When stage is completed, record stage completion status
    * @param stageCompleted Stage completed event
-   * @return No return
    */
   override def onStageCompleted(stageCompleted: StageCompleted) {
     stageLogInfo(stageCompleted.stage.stageId, "STAGE_ID=%d STATUS=COMPLETED".format(
@@ -309,7 +297,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
   /**
    * When task ends, record task completion status and metrics
    * @param taskEnd Task end event
-   * @return No return
    */
   override def onTaskEnd(taskEnd: SparkListenerTaskEnd) {
     val task = taskEnd.task
@@ -342,7 +329,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
   /**
    * When job ends, recording job completion status and close log file
    * @param jobEnd Job end event
-   * @return No return
    */
   override def onJobEnd(jobEnd: SparkListenerJobEnd) {
     val job = jobEnd.job
@@ -362,7 +348,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
    * Record job properties into job log file
    * @param jobID ID of the job
    * @param properties Properties of the job
-   * @return No return
    */
   protected def recordJobProperties(jobID: Int, properties: Properties) {
     if(properties != null) {
@@ -374,7 +359,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
   /**
    * When job starts, record job property and stage graph
    * @param jobStart Job start event
-   * @return No return
    */
   override def onJobStart(jobStart: SparkListenerJobStart) {
     val job = jobStart.job
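
Every hunk in this patch makes the same change: it deletes a `@return No return` Scaladoc tag from a method declared with Scala's procedure syntax. Such methods return `Unit`, so the tag documents nothing (the second hunk for `recordStageDepGraph` also drops a stray blank line between the comment and the signature). A minimal sketch of the resulting convention, using a hypothetical class and method that are not part of the patch:

```scala
// Hypothetical example (not from the patch) of the convention being enforced:
// procedure-syntax methods return Unit, so "@return No return" adds nothing,
// while @param tags still carry real information and are kept.
class ExampleListener {
  /**
   * Record one line of log output
   * @param info Info to be recorded
   */
  protected def logInfo(info: String) {  // procedure syntax: result type is Unit
    println(info)
  }
}
```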