Posted to commits@spark.apache.org by rx...@apache.org on 2013/11/05 03:21:36 UTC

[3/4] git commit: delete @return if method has no return value

delete @return if method has no return value
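For illustration only (this sketch is not part of the commit, and the identifiers in it are made up), the Scaladoc convention being enforced here: these methods are declared with procedure syntax, so they return Unit, and a "@return No return" tag documents nothing. The fix is to delete the tag and leave the rest of the comment untouched:

    // Before: a spurious @return tag on a Unit-returning method.
    /**
     * Log a message for one job
     * @param jobID ID of the job
     * @param msg Message to record
     * @return No return
     */
    protected def logForJobBefore(jobID: Int, msg: String) {
      println("JOB_ID=" + jobID + " " + msg)
    }

    // After: the @return line is simply removed; params and text stay as-is.
    /**
     * Log a message for one job
     * @param jobID ID of the job
     * @param msg Message to record
     */
    protected def logForJobAfter(jobID: Int, msg: String) {
      println("JOB_ID=" + jobID + " " + msg)
    }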


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/7374376f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/7374376f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/7374376f

Branch: refs/heads/branch-0.8
Commit: 7374376f55015c6ba5d4b7fd551aebcb83b1d041
Parents: 4ca72c8
Author: Mingfei <mi...@intel.com>
Authored: Tue Nov 5 08:44:32 2013 +0800
Committer: Mingfei <mi...@intel.com>
Committed: Tue Nov 5 08:44:32 2013 +0800

----------------------------------------------------------------------
 .../org/apache/spark/scheduler/JobLogger.scala      | 16 ----------------
 1 file changed, 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/7374376f/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala b/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
index a1d5e6c..94f8b01 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
@@ -70,7 +70,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
   /** 
    * Create a log file for one job
    * @param jobID ID of the job
-   * @return No return
    * @exception FileNotFoundException Fail to create log file
    */
   protected def createLogWriter(jobID: Int) {
@@ -85,7 +84,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
   /** 
    * Close log file, and clean the stage relationship in stageIDToJobID
    * @param jobID ID of the job
-   * @return No return
    */
   protected def closeLogWriter(jobID: Int) { 
     jobIDToPrintWriter.get(jobID).foreach { fileWriter => 
@@ -103,7 +101,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
    * @param jobID ID of the job
    * @param info Info to be recorded
    * @param withTime Controls whether to record time stamp before the info, default is true
-   * @return No return
    */
   protected def jobLogInfo(jobID: Int, info: String, withTime: Boolean = true) {
     var writeInfo = info
@@ -119,7 +116,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
    * @param stageID ID of the stage
    * @param info Info to be recorded
    * @param withTime Controls whether to record time stamp before the info, default is true
-   * @return No return
    */  
   protected def stageLogInfo(stageID: Int, info: String, withTime: Boolean = true) { 
     stageIDToJobID.get(stageID).foreach(jobID => jobLogInfo(jobID, info, withTime))
@@ -129,7 +125,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
    * Build stage dependency for a job
    * @param jobID ID of the job
    * @param stage Root stage of the job
-   * @return No return
    */  
   protected def buildJobDep(jobID: Int, stage: Stage) {
     if (stage.jobId == jobID) {
@@ -147,7 +142,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
   /** 
    * Record stage dependency and RDD dependency for a stage
    * @param jobID Job ID of the stage
-   * @return No return
    */   
   protected def recordStageDep(jobID: Int) {
     def getRddsInStage(rdd: RDD[_]): ListBuffer[RDD[_]] = {
@@ -206,7 +200,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
    * @param jobID Job ID of the stage
    * @param rdd Root RDD of the stage
    * @param indent Indent number before info
-   * @return No return
    */
   protected def recordRddInStageGraph(jobID: Int, rdd: RDD[_], indent: Int) {
     val rddInfo = 
@@ -231,9 +224,7 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
    * @param jobID Job ID of the stage
    * @param stage Root stage of the job
    * @param indent Indent number before info, default is 0
-   * @return No return
    */  
-  
   protected def recordStageDepGraph(jobID: Int, stage: Stage, idSet: HashSet[Int], indent: Int = 0) {
     val stageInfo = if (stage.isShuffleMap) {
       "STAGE_ID=" + stage.id + " MAP_STAGE SHUFFLE_ID=" + stage.shuffleDep.get.shuffleId
@@ -258,7 +249,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
    * @param status Status info of the task
    * @param taskInfo Task description info
    * @param taskMetrics Task running metrics
-   * @return No return
    */
   protected def recordTaskMetrics(stageID: Int, status: String, 
                                 taskInfo: TaskInfo, taskMetrics: TaskMetrics) {
@@ -287,7 +277,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
   /** 
    * When stage is submitted, record stage submit info
    * @param stageSubmitted Stage submitted event
-   * @return No return
    */
   override def onStageSubmitted(stageSubmitted: SparkListenerStageSubmitted) {
     stageLogInfo(stageSubmitted.stage.stageId,"STAGE_ID=%d STATUS=SUBMITTED TASK_SIZE=%d".format(
@@ -297,7 +286,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
   /** 
    * When stage is completed, record stage completion status
    * @param stageCompleted Stage completed event
-   * @return No return
    */
   override def onStageCompleted(stageCompleted: StageCompleted) {
     stageLogInfo(stageCompleted.stage.stageId, "STAGE_ID=%d STATUS=COMPLETED".format(
@@ -309,7 +297,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
   /** 
    * When task ends, record task completion status and metrics
    * @param taskEnd Task end event
-   * @return No return
    */
   override def onTaskEnd(taskEnd: SparkListenerTaskEnd) {
     val task = taskEnd.task
@@ -342,7 +329,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
   /** 
    * When job ends, recording job completion status and close log file
    * @param jobEnd Job end event
-   * @return No return
    */
   override def onJobEnd(jobEnd: SparkListenerJobEnd) {
     val job = jobEnd.job
@@ -362,7 +348,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
    * Record job properties into job log file
    * @param jobID ID of the job
    * @param properties Properties of the job
-   * @return No return
    */
   protected def recordJobProperties(jobID: Int, properties: Properties) {
     if(properties != null) {
@@ -374,7 +359,6 @@ class JobLogger(val logDirName: String) extends SparkListener with Logging {
   /** 
    * When job starts, record job property and stage graph
    * @param jobStart Job start event
-   * @return No return
    */
   override def onJobStart(jobStart: SparkListenerJobStart) {
     val job = jobStart.job