package SparkMaster_Worker

import akka.actor.{Actor, ActorSystem, Props}
import com.typesafe.config.ConfigFactory
import scala.concurrent.duration.DurationInt
import scala.collection.mutable

object Master {
  /**
   * Entry point: boots the remote actor system that hosts the master actor
   * and tells it to start its heartbeat-timeout monitoring.
   *
   * Note: the original code also declared workerHost/workerPort here, but
   * they were never used in this object, so they have been removed.
   */
  def main(args: Array[String]): Unit = {
    // Address the master actor system binds to; workers connect here remotely.
    val (masterHost, masterPort) = ("127.0.0.1", 10005)
    val conf = ConfigFactory.parseString(
      s"""|akka.actor.provider="akka.remote.RemoteActorRefProvider"
          |akka.remote.netty.tcp.hostname=$masterHost
          |akka.remote.netty.tcp.port=$masterPort
      """.stripMargin
    )
    val masterSystem = ActorSystem("sparkMaster", conf)
    // Create the master actor and kick off its startup logic.
    val sparkMaster01 = masterSystem.actorOf(Props[Master], "sparkMaster01")
    sparkMaster01 ! "start"
  }

}

/**
 * Master actor: tracks registered workers, refreshes their heartbeat
 * timestamps, and periodically evicts workers whose heartbeat has timed out.
 */
class Master extends Actor {
  // Registry of known workers, keyed by worker id.
  // Reassigned on each timeout sweep (filter returns a new map), hence var.
  var workers = mutable.HashMap[String, workerInfo]()

  override def receive: Receive = {
    case "start" => {
      println("master 启动成功")
      // Schedule the periodic heartbeat-timeout check.
      self ! StartTimeOutWorker
    }
    case RegisterWorkerInfo(id, cpu, ram) => {
      // Register a new worker; duplicate registrations are silently ignored.
      if (!workers.contains(id)) {
        workers += ((id, new workerInfo(id, cpu, ram)))
        // Ack the registration. NOTE(review): this replies with the
        // RegisterWorkerInfo companion object, not a dedicated ack message —
        // confirm this matches what the worker side pattern-matches on.
        sender() ! RegisterWorkerInfo
      }
    }
    case HeartBeat(id) => {
      // Refresh the heartbeat timestamp for this worker.
      // Fix: the original used workers(id), which throws
      // NoSuchElementException for a worker already evicted by the timeout
      // sweep (the 6000 ms eviction window is shorter than the 9000 ms
      // sweep interval, so late heartbeats from evicted workers can arrive).
      workers.get(id).foreach { wokerInfo =>
        wokerInfo.lastHeartBeat = System.currentTimeMillis()
        println("master 更新了" + wokerInfo.id + " 的心跳时间")
      }
    }
    case StartTimeOutWorker => {
      println("kaishi dingshi jiance worker 任务")

      import context.dispatcher
      // Every 9000 ms, send ourselves RemoveTimeOutWorker to run the sweep.
      context.system.scheduler.schedule(0 millis, 9000 millis, self, RemoveTimeOutWorker)
    }
    case RemoveTimeOutWorker => {
      // Evict workers whose last heartbeat is older than 6000 ms.
      val nowTime = System.currentTimeMillis()
      workers = workers.filter(worker => (nowTime - worker._2.lastHeartBeat) < 6000)
      println(s"当前${workers.size} 存活")
    }
  }
}