/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.scheduler

import scala.collection.mutable.ListBuffer

import org.apache.spark.annotation.DeveloperApi

/**
 * :: DeveloperApi ::
 * Information about a running task attempt inside a TaskSet.
 *
 * @param taskId unique id of this task attempt
 * @param index index of this task within its TaskSet (i.e. the partition it computes)
 * @param attempt how many times this task has been attempted before this run
 *                (not counting the currently running attempt)
 * @param launchTime time (epoch millis) at which the task was launched
 * @param executorId id of the executor running this task attempt
 * @param host host on which the task attempt is running
 * @param taskLocality locality level the task was launched at
 * @param speculative whether this attempt was launched as a speculative copy
 */
@DeveloperApi
class TaskInfo(
    val taskId: Long,
    val index: Int,
    val attempt: Int,
    val launchTime: Long,
    val executorId: String,
    val host: String,
    val taskLocality: TaskLocality.TaskLocality,
    val speculative: Boolean) {

  /**
   * The time when the task started remotely getting the result. Will not be set if the
   * task result was sent immediately when the task finished (as opposed to sending an
   * IndirectTaskResult and later fetching the result from the block manager).
   */
  var gettingResultTime: Long = 0

  /**
   * Intermediate updates to accumulables during this task. Note that it is valid for the same
   * accumulable to be updated multiple times in a single task or for two accumulables with the
   * same name but different IDs to exist in a task.
   */
  val accumulables = ListBuffer[AccumulableInfo]()

  /**
   * The time when the task has completed successfully (including the time to remotely fetch
   * results, if necessary). Remains 0 while the task is still running.
   */
  var finishTime: Long = 0

  /** Whether this task attempt ended in failure. Only meaningful once `finished` is true. */
  var failed = false

  /** Marks the point at which this task began remotely fetching its (indirect) result. */
  private[spark] def markGettingResult(time: Long = System.currentTimeMillis): Unit = {
    gettingResultTime = time
  }

  /** Marks the task as successfully completed at `time`. */
  private[spark] def markSuccessful(time: Long = System.currentTimeMillis): Unit = {
    finishTime = time
  }

  /** Marks the task as failed at `time`. */
  private[spark] def markFailed(time: Long = System.currentTimeMillis): Unit = {
    finishTime = time
    failed = true
  }

  /** True once the task has started fetching its result from the block manager. */
  def gettingResult: Boolean = gettingResultTime != 0

  /** True once the task has finished, whether successfully or not. */
  def finished: Boolean = finishTime != 0

  /** True once the task has finished without failure. */
  def successful: Boolean = finished && !failed

  /** True while the task has not yet finished. */
  def running: Boolean = !finished

  /**
   * Human-readable status of this task attempt.
   *
   * Fix: the previous implementation tested `gettingResult` before `failed`/`successful`,
   * and since `gettingResultTime` is never reset, any finished task that had fetched an
   * indirect result stayed reported as "GET RESULT" forever. "GET RESULT" now applies
   * only while the task is still running. (The old "UNKNOWN" branch was unreachable:
   * a finished, non-failed task is by definition `successful`.)
   */
  def status: String = {
    if (running) {
      if (gettingResult) "GET RESULT" else "RUNNING"
    } else if (failed) {
      "FAILED"
    } else {
      "SUCCESS"
    }
  }

  /** Identifier of the form "index.attempt", e.g. "3.0" for the first attempt on partition 3. */
  def id: String = s"$index.$attempt"

  /**
   * Wall-clock duration of the task, including the time to remotely fetch results if any.
   *
   * @throws UnsupportedOperationException if the task has not yet finished
   */
  def duration: Long = {
    if (!finished) {
      throw new UnsupportedOperationException("duration() called on unfinished task")
    } else {
      finishTime - launchTime
    }
  }

  /** Time the task has been running so far, measured against the supplied clock value. */
  private[spark] def timeRunning(currentTime: Long): Long = currentTime - launchTime
}
