package cn.doitedu.day05

import cn.doitedu.utils.IpUtils
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Resolves the geographic region (province) for each IP address in an access log.
 */
object T02_IpLocationDemo {

  /**
   * Entry point: reads IP-range rules and an access log, resolves each log
   * record's IP to a province via binary search over the broadcast rules,
   * and prints per-province access counts.
   */
  def main(args: Array[String]): Unit = {

    // FIX: app name previously said "WordCount" — copy-paste from another demo.
    val conf = new SparkConf().setAppName("IpLocationDemo")
      .setMaster("local[4]") // comment out setMaster when submitting to a cluster

    val sc = new SparkContext(conf)

    try {
      // 1. Read the IP rule file. Each pipe-delimited line carries:
      //    fields(2) = range start as a Long, fields(3) = range end,
      //    fields(6) = province, fields(7) = city.
      val ipLines = sc.textFile("data/ip.txt")

      // 2. Parse the rules and collect them to the Driver so they can be broadcast.
      val ipRulesInDriver: Array[(Long, Long, String, String)] = ipLines.map(line => {
        val fields = line.split("[|]")
        val startNum = fields(2).toLong
        val endNum = fields(3).toLong
        val province = fields(6)
        val city = fields(7)
        (startNum, endNum, province, city)
      }).collect()

      // 3. Broadcast the rules once: each Executor keeps a single read-only copy
      //    instead of having the array shipped with every task.
      val broadcastRefInDriver: Broadcast[Array[(Long, Long, String, String)]] = sc.broadcast(ipRulesInDriver)

      // 4. Read the user access log; fields(1) holds the dotted-quad IP string.
      val logLines = sc.textFile("data/ipaccess.log")

      logLines.map(line => {
        val fields = line.split("\\|")
        val ip = fields(1)
        // Convert the IP string to its numeric form for range comparison.
        val ipNum = IpUtils.ip2Long(ip)
        // Dereferencing the broadcast inside the task resolves to the
        // Executor-local copy of the rules.
        val ipRulesInExecutor: Array[(Long, Long, String, String)] = broadcastRefInDriver.value
        val index = IpUtils.binarySearch(ipRulesInExecutor, ipNum)
        // binarySearch returns a negative index when no rule range matches;
        // fall back to the "unknown" label in that case.
        val province = if (index >= 0) ipRulesInExecutor(index)._3 else "未知"
        (province, 1)
      }).reduceByKey(_ + _)
        .foreach(println)

      // Release the broadcast's cached copies on the Executors (blocking).
      broadcastRefInDriver.unpersist(true)
    } finally {
      // FIX: the original never stopped the SparkContext, leaking its
      // resources (threads, UI port, temp dirs) if main exits or throws.
      sc.stop()
    }
  }
}
