package org.apache.spark.deploy

import java.text.SimpleDateFormat
import java.util.concurrent.TimeUnit
import java.util.{Date, Locale}

import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.rpc.{RpcAddress, RpcEnv, ThreadSafeRpcEndpoint}
import org.apache.spark.util.{ThreadUtils, Utils}

/**
 * A fake standalone worker endpoint. On start it registers itself with every
 * configured master; once a master acknowledges with [[RegisteredWorker]] it
 * begins sending [[Heartbeat]] messages on a fixed schedule.
 *
 * @param rpcEnv           the RPC environment this endpoint is registered in
 * @param cores            number of cores this worker advertises
 * @param memory           amount of memory (MB) this worker advertises
 * @param masterRpcAddress addresses of all masters to register with
 * @param endpointName     name under which this endpoint is registered
 * @param conf             Spark configuration
 * @param securityMgr      security manager for the RPC environment
 */
class MyWorker(override val rpcEnv: RpcEnv,
               cores: Int,
               memory: Int,
               masterRpcAddress: Array[RpcAddress],
               endpointName: String,
               val conf: SparkConf,
               val securityMgr: SecurityManager
              ) extends ThreadSafeRpcEndpoint {
  private val host = rpcEnv.address.host
  private val port = rpcEnv.address.port
  private val workerId = generateWorkerId()
  // Interval between heartbeats sent to the master, in milliseconds.
  private val HEARTBEAT_MILLIS = 15 * 1000
  // Single daemon thread used to push periodic heartbeat messages to the master.
  private val forwardMessageScheduler =
    ThreadUtils.newDaemonSingleThreadScheduledExecutor("fake-worker-forward-message-scheduler")

  /** Announces this worker to every configured master. */
  override def onStart(): Unit = {
    val info = "Starting Spark worker %s:%d with %d cores, %sM RAM"
      .format(host, port, cores, memory)
    println(info)

    masterRpcAddress.foreach { masterAddress =>
      val masterEndpoint = rpcEnv.setupEndpointRef(masterAddress, MyMaster.ENDPOINT_NAME)
      masterEndpoint.send(RegisterWorker(workerId, host, port, self, cores, memory, masterAddress))
    }
  }

  /** Cancels the heartbeat task so the scheduler thread does not leak. */
  override def onStop(): Unit = {
    forwardMessageScheduler.shutdownNow()
  }

  override def receive: PartialFunction[Any, Unit] = {
    case RegisteredWorker(master, masterAddress) =>
      println(s"RegisteredWorker: master=$master masterAddress=$masterAddress")
      // BUG FIX: the original passed TimeUnit.MICROSECONDS, which made the
      // 15 * 1000 interval fire every 15 ms instead of every 15 seconds.
      forwardMessageScheduler.scheduleAtFixedRate(() => Utils.tryLogNonFatalError {
        master.send(Heartbeat(workerId, self))
      }, 0, HEARTBEAT_MILLIS, TimeUnit.MILLISECONDS)
    case _ => println("Worker 接收到 Unknown Message")
  }

  // SimpleDateFormat is not thread-safe, so a fresh instance is created per call.
  private def createDateFormat =
    new SimpleDateFormat("yyyyMMddHHmmss", Locale.US)

  /** Builds a unique worker id of the form worker-<timestamp>-<host>-<port>. */
  private def generateWorkerId(): String = {
    "worker-%s-%s-%d".format(createDateFormat.format(new Date),
      host, port)
  }
}

object MyWorker {
  val SYSTEM_NAME = "fakeWorker"
  val ENDPOINT_NAME = "worker"

  /**
   * Entry point: starts an RPC environment on localhost:9998, registers a
   * fake worker endpoint pointed at a master on localhost:9999, and blocks
   * until the environment terminates.
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf
    val bindHost = "localhost"
    val bindPort = 9998
    val masters = Array(RpcAddress("localhost", 9999))

    val secMgr = new SecurityManager(sparkConf)
    val env = RpcEnv.create(SYSTEM_NAME, bindHost, bindPort, sparkConf, secMgr)

    // Registering the endpoint triggers MyWorker.onStart, which announces
    // this worker to each master address.
    env.setupEndpoint(ENDPOINT_NAME,
      new MyWorker(env, 4, 4096, masters, ENDPOINT_NAME, sparkConf, secMgr))

    // Keep the main thread alive until the RPC environment shuts down.
    env.awaitTermination()
  }
}