package com.haozhen.spark

import java.util.UUID

import akka.actor.{Actor, ActorRef, ActorSelection, ActorSystem, Props}
import com.haozhen.spark.Common.{HeartBeat, RegisterWorkInfo, RegisteredWorkerInfo, SendHeartBeat, StartMessage}
import com.typesafe.config.ConfigFactory
import scala.concurrent.duration._

/**
  * @author haozhen
  * @email haozh@ync1.com
  * @date 2020/12/9  21:50
  */

/**
  * Worker actor: registers itself with a remote SparkMaster and then
  * sends periodic heartbeats so the master knows it is still alive.
  *
  * @param masterHost host the SparkMaster actor system is bound to
  * @param masterPort port the SparkMaster actor system is bound to
  */
class SparkWorker(masterHost: String, masterPort: Int) extends Actor {

  // FIX: the scheduler call in `receive` needs an implicit ExecutionContext;
  // without this import the block does not compile.
  import context.dispatcher

  // Unique id this worker uses to identify itself to the master.
  val id = UUID.randomUUID().toString

  // Selection pointing at the remote SparkMaster actor; resolved in preStart.
  var masterProxy: ActorSelection = _

  override def preStart(): Unit = {
    super.preStart()
    // akka.tcp remote path: system name "SparkMaster", top-level actor "SparkMaster".
    masterProxy = context.actorSelection(s"akka.tcp://SparkMaster@${masterHost}:${masterPort}/user/SparkMaster")
  }

  override def receive: Receive = {
    case StartMessage(msg) =>
      println(s"Spark Worker start. $msg")
      // Register with 8 cores and 8 * 1024 MB of memory.
      masterProxy ! RegisterWorkInfo(id, 8, 8 * 1024)

    case RegisteredWorkerInfo =>
      println(s"收到sparkMaster回复消息，$id 注册成功！")
      // Registration acknowledged: heartbeat to self every 3 s, starting now.
      context.system.scheduler.schedule(0.millis, 3000.millis, self, SendHeartBeat)

    case SendHeartBeat =>
      println(s"worker[${id}]发送心跳！")
      masterProxy ! HeartBeat(id)
  }
}

object SparkWorker {

  // Endpoint configuration for both sides of the connection.
  val (masterHost, masterPort, workerHost, workerPort) = ("127.0.0.1", 10000, "127.0.0.1", 10001)

  /**
    * Entry point: boots the worker actor system and triggers registration.
    *
    * FIX 1: the original object had no `main`, so its body only ran lazily
    * on first reference and the program could not be launched directly.
    * FIX 2: the provider class name was misspelled ("RemoteActorRefProvide"),
    * which breaks remoting — corrected to "akka.remote.RemoteActorRefProvider".
    */
  def main(args: Array[String]): Unit = {
    val config = ConfigFactory.parseString(
      s"""
         |akka.actor.provider="akka.remote.RemoteActorRefProvider"
         |akka.remote.netty.tcp.hostname=$workerHost
         |akka.remote.netty.tcp.port=$workerPort
       """.stripMargin
    )

    val sparkWorkerSystem = ActorSystem("SparkWorker", config)
    val sparkActor: ActorRef = sparkWorkerSystem.actorOf(
      Props(new SparkWorker(masterHost, masterPort)), "sparkWorker-01"
    )

    // Kick off registration with the master.
    sparkActor ! StartMessage(s"host:$workerHost port:$workerPort")
  }
}
