package org.apache.spark.simon

import java.util.concurrent.{Executors, ScheduledExecutorService, TimeUnit}

import org.apache.spark.SparkConf
import org.apache.spark.SecurityManager
import org.apache.spark.rpc.{RpcAddress, RpcEndpointRef, RpcEnv, RpcEnvConfig, ThreadSafeRpcEndpoint}

import scala.concurrent.ExecutionContext.Implicits.global

/**
 * Worker-side RPC endpoint.
 *
 * On start it connects to the Master endpoint and sends a registration
 * message; once the Master acknowledges with `RegisteredWorker`, it starts
 * a scheduler that sends a heartbeat every 10 seconds. The scheduler is
 * shut down in `onStop` so its thread does not outlive the endpoint.
 *
 * @param rpcEnv the RPC environment this endpoint runs in
 * @author changf
 * @date 2024/1/16 9:54 PM
 */
class Worker(val rpcEnv: RpcEnv) extends ThreadSafeRpcEndpoint {

  // Reference to the Master endpoint; assigned in onStart, which runs
  // before any message is delivered to receive for a ThreadSafeRpcEndpoint.
  var masterEndpointRef: RpcEndpointRef = _
  val WORKER_ID = "worker01"

  // Heartbeat scheduler; created on RegisteredWorker and shut down in onStop
  // so the non-daemon pool thread does not leak when the endpoint stops.
  private var heartbeatScheduler: ScheduledExecutorService = _

  override def onStart(): Unit = {
    // Connect to the Master and announce this worker
    // (memory = 800, cores = 4 — hard-coded demo values).
    masterEndpointRef = rpcEnv.setupEndpointRef(RpcAddress("localhost", 8888), "Master")
    masterEndpointRef.send(RegisterWorker(self, WORKER_ID, 800, 4))
  }

  override def receive: PartialFunction[Any, Unit] = {
    case "response" =>
      println("worker 接收到master返回信息")
      // Send a synchronous (ask) message. Handle both outcomes: an ignored
      // failed Future is silently swallowed, so log failures explicitly.
      val future = masterEndpointRef.ask[String]("ask-msg")
      future.foreach(reply => println(reply))
      future.failed.foreach(e => println(s"ask-msg failed: ${e.getMessage}"))
    case RegisteredWorker =>
      // Registration acknowledged by the Master: start the heartbeat timer,
      // firing immediately and then every 10 seconds.
      heartbeatScheduler = Executors.newScheduledThreadPool(1)
      heartbeatScheduler.scheduleAtFixedRate(new Runnable {
        override def run(): Unit = {
          masterEndpointRef.send(new HeartBeat(WORKER_ID))
        }
      }, 0, 10, TimeUnit.SECONDS)
  }

  override def onStop(): Unit = {
    // Stop the heartbeat thread so it does not keep the JVM alive
    // after the endpoint is unregistered.
    if (heartbeatScheduler != null) {
      heartbeatScheduler.shutdownNow()
    }
  }
}

/**
 * Companion object: entry point that boots the worker-side RPC environment,
 * registers the [[Worker]] endpoint (which triggers its `onStart`) and then
 * blocks the main thread until the environment terminates.
 */
object Worker {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    val securityManager = new SecurityManager(conf)

    // Bring up the RPC environment listening on localhost:9999.
    val workerRpcEnv: RpcEnv = RpcEnv.create("SparkWorker", "localhost", 9999, conf, securityManager)

    // Registering the endpoint invokes Worker.onStart, which contacts the Master.
    workerRpcEnv.setupEndpoint("Worker", new Worker(workerRpcEnv))

    // Keep the process alive until the RPC environment shuts down.
    workerRpcEnv.awaitTermination()
  }
}
