package com.jhhe.homework.SparkMW

import java.util.UUID
import akka.actor.{Actor, ActorSelection, ActorSystem, Props}
import com.typesafe.config.ConfigFactory
import scala.concurrent.duration._

/**
 * Worker actor: registers itself with the remote master on "start" and,
 * once registration is acknowledged, sends a heartbeat every 3 seconds.
 *
 * @param masterHost host of the master actor system
 * @param masterPort port of the master actor system
 * @param masterName actor name of the master under /user
 */
class SparkWorker(masterHost:String,masterPort:Int,masterName:String) extends Actor{
  // Remote reference to the master, resolved in preStart.
  // NOTE(review): field name keeps the original (misspelled) "masterPorxy"
  // so any external references remain valid.
  var masterPorxy : ActorSelection = _
  // These three must be actor-level fields so the exact same values are
  // reused across messages (registration and every subsequent heartbeat).
  val id = UUID.randomUUID().toString
  val cupSize = Runtime.getRuntime.availableProcessors()  // number of CPU cores on this host
  private val memorySize: Long = Runtime.getRuntime.totalMemory() // total JVM memory of this host

  override def preStart(): Unit = {
    // Look up the master by its remote actor path (classic Akka remoting).
    masterPorxy = context.actorSelection(s"akka.tcp://SparkMasterApp@${masterHost}:${masterPort}/user/${masterName}")
    println("masterPorxy: "+masterPorxy)
  }

  override def receive: Receive = {
    case "start" =>
      println("worker启动了。。。")
      // Send our registration info to the master.
      masterPorxy ! RegisterWorkerInfo(id, cupSize, memorySize)
    case RegisteredWorkerInfo =>
      println("worker注册成功： "+id)
      // After successful registration, start a repeating timer that sends
      // SendHeartBeat to ourselves.
      import context.dispatcher
      // Notes:
      // 1. 0.millis    : no initial delay, fire immediately
      // 2. 3000.millis : repeat every 3 seconds
      // 3. self        : deliver the message to this actor
      // 4. SendHeartBeat: the message payload
      // Dot-notation (0.millis) is used instead of postfix "0 millis" so the
      // code compiles without importing scala.language.postfixOps
      // (postfix operators are an error by default on Scala 2.13+).
      context.system.scheduler.schedule(0.millis, 3000.millis, self, SendHeartBeat)
    case SendHeartBeat =>
      println("worker = " + id + "给master发送心跳")
      masterPorxy ! HeartBeat(id)
  }
}

object SparkWorkerApp{
  /**
   * Entry point for a worker node.
   *
   * Expected arguments (in order):
   *   workerHost workerPort workerName masterHost masterPort masterName
   */
  def main(args: Array[String]): Unit = {
    import scala.util.Try

    if (args.length != 6) {
      // Several workers can be started at once to verify that the master
      // promptly removes a worker that stops sending heartbeats, e.g.:
      //   127.0.0.1 10001 SparkWorker01 127.0.0.1 10005 SparkMaster
      //   127.0.0.1 10002 SparkWorker02 127.0.0.1 10005 SparkMaster
      //   127.0.0.1 10003 SparkWorker03 127.0.0.1 10005 SparkMaster
      println("请输入参数 workerHost workerPort workerName masterHost masterPort masterName")
      sys.exit()
    }
    val workerHost = args(0)
    val workerPort = args(1)
    val workerName = args(2)
    val masterHost = args(3)
    val masterPort = args(4)
    val masterName = args(5)

    // Validate both ports up front instead of letting a raw
    // NumberFormatException escape from .toInt (or from netty reading the
    // interpolated config) later on.
    if (Try(workerPort.toInt).isFailure) {
      println("workerPort must be a number, got: " + workerPort)
      sys.exit()
    }
    val masterPortNum: Int = Try(masterPort.toInt).getOrElse {
      println("masterPort must be a number, got: " + masterPort)
      sys.exit()
    }

    // Classic Akka remoting config: bind this worker's actor system to the
    // given host/port.
    val config = ConfigFactory.parseString(
      s"""
         |akka.actor.provider="akka.remote.RemoteActorRefProvider"
         |akka.remote.netty.tcp.hostname=${workerHost}
         |akka.remote.netty.tcp.port=${workerPort}
            """.stripMargin)
    val sparkWorkerApp = ActorSystem("SparkWorkerApp",config)
    val sparkWorkerActorRef = sparkWorkerApp.actorOf(Props(new SparkWorker(masterHost,masterPortNum,masterName)),
      s"${workerName}")
    // Kick off registration with the master.
    sparkWorkerActorRef ! "start"
  }
}

