package top.doe.spark.netty

import top.doe.spark.rpc.{RpcEndpoint, RpcEndpointAddress, RpcEndpointRef}

import java.util.concurrent.ConcurrentHashMap

class Dispatcher(val nettyEnv: NettyRpcEnv, val numUsableCores: Int) {

  // Thread-safe map from each registered RpcEndpoint to its RpcEndpointRef.
  private val refByEndpoint = new ConcurrentHashMap[RpcEndpoint, RpcEndpointRef]()

  // Shared message loop that processes messages for all registered endpoints.
  private val messageLoop = new SharedMessageLoop(this, numUsableCores)

  /**
   * Registers `endpoint` under `name` and returns a reference to it.
   *
   * Records the endpoint-to-ref association and binds the endpoint (by name)
   * to the shared message loop, which pairs it with an Inbox.
   *
   * @param name     the name the endpoint is registered under
   * @param endpoint the endpoint to register
   * @return the newly created [[RpcEndpointRef]] for the endpoint
   */
  def registerRpcEndpoint(name: String, endpoint: RpcEndpoint): RpcEndpointRef = {
    val endpointAddress = RpcEndpointAddress(nettyEnv.address, name)
    val ref: RpcEndpointRef = new NettyRpcEndpointRef(endpointAddress, nettyEnv)
    refByEndpoint.put(endpoint, ref)
    // Bind the endpoint's name to the message loop (and thus its Inbox).
    messageLoop.register(name, endpoint)
    // TODO: after an RpcEndpoint is registered, onStart must be invoked once.
    ref
  }
}
