package org.apache.spark.rpc.netty

import java.util.concurrent.TimeUnit

import org.apache.spark.rpc.{RpcCallContext, RpcEnv, ThreadSafeRpcEndpoint}
import org.apache.spark.{SecurityManager, SparkConf}


/**
 * Message protocol shared between the RPC client and server.
 *
 * The request hierarchy is a sealed trait (rather than an instantiable
 * `sealed class`) so the compiler can verify exhaustiveness when the
 * endpoint pattern-matches on incoming messages; the concrete messages
 * are `final case class`es per standard ADT practice.
 */
sealed trait TestMathBase
final case class TestAdd(a: Int, b: Int) extends TestMathBase
final case class TestSub(a: Int, b: Int) extends TestMathBase

/** Reply carrying the computed integer result of an add/sub request. */
final case class Result(result: Int)

/** One-way (fire-and-forget) greeting, handled by `receive` with no reply. */
final case class HelloMessage(msg: String)

/**
  * Server-side handler for the RPC endpoint: defines how incoming
  * messages are processed (analogous to a netty handler).
  *
  * `ThreadSafeRpcEndpoint` guarantees messages are delivered one at a
  * time, so no extra synchronization is needed here.
  *
  * @param rpcEnv the RPC environment this endpoint is registered with
  */
class RpcServerEndPoint(override val rpcEnv: RpcEnv) extends ThreadSafeRpcEndpoint {

  // Lifecycle hook: invoked once when the endpoint is registered.
  override def onStart(): Unit = println("rpc start")

  // Lifecycle hook: invoked once when the endpoint is unregistered.
  override def onStop(): Unit = println("rpc stop")

  // Handles one-way (`send`) messages that expect no reply.
  override def receive: PartialFunction[Any, Unit] = {
    case HelloMessage(msg) => println(s"received msg: $msg")
  }

  // Handles `ask` messages: each case must answer via `context.reply`.
  override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = {
    case TestAdd(a, b) =>
      println(s"received test add: a: $a, b: $b")
      context.reply(Result(a + b))
    case TestSub(a, b) =>
      println(s"received test sub: a: $a, b: $b")
      context.reply(Result(a - b))
  }
}

object RpcServer {
  val SERVER_ENDPOINT_NAME = "RpcServer"

  /**
    * Starts a standalone Spark RPC server bound to 127.0.0.1:4040 and
    * blocks until the RPC environment is shut down.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    val systemName = "sparkDriver"
    val hostname = "127.0.0.1"
    val port = 4040

    val securityManager = new SecurityManager(conf)
    // Create the RPC environment, which also starts the server transport
    // (a netty server) listening on hostname:port.
    val rpcEnv = RpcEnv.create(systemName, hostname, port, conf, securityManager)

    // Register the endpoint (the handler) under SERVER_ENDPOINT_NAME so
    // clients can look it up by name.
    val endpointRef = rpcEnv.setupEndpoint(SERVER_ENDPOINT_NAME, new RpcServerEndPoint(rpcEnv))

    // Block until the RPC environment terminates, instead of parking the
    // main thread on an arbitrary multi-hour sleep.
    rpcEnv.awaitTermination()
  }
}
