package org.apache.spark.netty

import java.util.concurrent.{ScheduledExecutorService, TimeUnit}

import org.apache.spark.{SparkConf, SparkContext, SparkEnv}
import org.apache.spark.common.Constant
import org.apache.spark.common.Message.{CheckTimeOut, Heartbeat, NotifyOffline, NotifyOnline, RegisterClient, RegisterNettyClient, RegisteredClient, RegisteredNettyClient, SendMessage}
import org.apache.spark.rpc.{RpcAddress, RpcCallContext, RpcEndpoint, RpcEnv}
import org.apache.spark.sql.SparkSession
import org.apache.spark.util.{ThreadUtils, Utils}
/**
 * RPC endpoint that registers itself with a Netty RPC server on startup and,
 * once the registration is acknowledged, sends periodic heartbeats to keep
 * the connection alive.
 *
 * @author chenzhidiao
 * @date 2021/1/7 7:37
 * @version 1.0
 */
class MyNettyRpcClient(override val rpcEnv: RpcEnv,
                       val clientHostname: String,
                       clientid: String,
                       val clientport: Int,
                       serverEndpointName: String,
                       clientEndpointName: String
                      ) extends RpcEndpoint {

  /** Interval between heartbeat ticks, in milliseconds. */
  private val heartbeatIntervalMs = 4000L

  // Proxy reference to the server-side endpoint. The server address is
  // hard-coded to localhost:5678 — NOTE(review): make configurable if the
  // server may ever run on another host/port.
  val serverEndpointRef = rpcEnv.setupEndpointRef(new RpcAddress("localhost", 5678), serverEndpointName)

  // Heartbeat scheduler. Created only after the server confirms registration,
  // and shut down in onStop() so its daemon thread does not leak. The original
  // code created a fresh executor per RegisteredNettyClient message and never
  // stopped it.
  private var heartbeatScheduler: Option[ScheduledExecutorService] = None

  /** Send the registration request to the server when this endpoint starts. */
  override def onStart(): Unit = {
    println(s"客户端${clientid}正在注册")

    // Ask the server to register this client (id, host, port, endpoint name).
    serverEndpointRef.send(RegisterNettyClient(clientid, clientHostname, clientport, clientEndpointName))
  }

  /** Fire-and-forget message handler — no reply is expected by the sender. */
  override def receive: PartialFunction[Any, Unit] = {

    // `id` (not `clientid`) to avoid shadowing the constructor parameter;
    // the printed value is whatever id the notification carries.
    case NotifyOnline(id) =>
      println(s"客户端${id}上线")

    case NotifyOffline(id) =>
      println(s"客户端${id}下线")

    case RegisteredNettyClient(msg) =>
      println(s"客户端${clientid}" + msg)
      // Registration confirmed: start the periodic heartbeat exactly once.
      // The isEmpty guard prevents a duplicate confirmation from spawning a
      // second scheduler thread.
      if (heartbeatScheduler.isEmpty) {
        val scheduler = ThreadUtils.newDaemonSingleThreadScheduledExecutor("send-heartbeat-thread")
        scheduler.scheduleAtFixedRate(new Runnable {
          override def run(): Unit = Utils.tryLogNonFatalError {
            // Send ourselves a tick; the actual heartbeat to the server is
            // emitted from the SendMessage case so it runs on the RPC
            // dispatcher thread rather than the scheduler thread.
            self.send(SendMessage)
          }
        }, 0, heartbeatIntervalMs, TimeUnit.MILLISECONDS)
        heartbeatScheduler = Some(scheduler)
      }

    case SendMessage =>
      // Forward a heartbeat carrying this client's id to the server.
      serverEndpointRef.send(Heartbeat(clientid))
  }

  /** Stop the heartbeat thread when the endpoint is torn down. */
  override def onStop(): Unit = {
    heartbeatScheduler.foreach(_.shutdownNow())
    heartbeatScheduler = None
  }
}


object MyNettyRpcClient {
  /**
   * Entry point for a standalone heartbeat client.
   *
   * Expected arguments, in order:
   *   args(0) — client port (int)
   *   args(1) — client id
   *   args(2) — client RpcEnv name
   *   args(3) — client endpoint name
   * Example: 7112 client2 NettyRpcClient2 ClientEndpoint2
   *
   * @param args command-line arguments as described above
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage message instead of an opaque
    // ArrayIndexOutOfBoundsException when arguments are missing.
    require(args.length >= 4,
      "Usage: MyNettyRpcClient <port> <clientId> <rpcEnvName> <endpointName>")

    val conf = new SparkConf()
    // A local SparkSession is created only to obtain a SparkEnv, whose
    // SecurityManager is required by RpcEnv.create below.
    val sparkSession: SparkSession = SparkSession.builder()
      .config(conf)
      .appName("NettyRpcTest")
      .master("local[*]")
      .getOrCreate()
    val sc: SparkContext = sparkSession.sparkContext
    val sparkEnv: SparkEnv = sc.env

    val hostname = "localhost"
    val port = args(0).toInt
    val clientid = args(1)
    val rpcName: String = args(2)
    val clientEndPointName = args(3)

    // clientMode = false: this RpcEnv listens on `port` so the server can
    // send messages back to the client endpoint registered below.
    val rpcEnv: RpcEnv = RpcEnv.create(rpcName, hostname, port, conf, sparkEnv.securityManager, false)

    rpcEnv.setupEndpoint(clientEndPointName,
      new MyNettyRpcClient(rpcEnv, hostname, clientid, port, Constant.NettyServerEndpointName, clientEndPointName))

    // Block the main thread until the RpcEnv is shut down.
    rpcEnv.awaitTermination()
  }

}
