package cn.doitedu.day08

import cn.doitedu.utils.IpUtils
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.SparkSession

/**
 * Resolve the geolocation (province) of IP addresses using Spark SQL
 * with a registered UDF and a broadcast IP-rule table.
 */
object T04_UDFDemo2 {

  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder().appName("UDFDemo1")
      .master("local[*]")
      .getOrCreate()

    // 1. Read the raw IP-rule file. Each line is pipe-delimited; fields 2 and 3
    //    are the numeric start/end of an IP range, fields 6 and 7 the province/city.
    val ipLines = spark.sparkContext.textFile("data/ip.txt")

    // 2. Parse the rules into (startNum, endNum, province, city) tuples and
    //    collect them to the Driver so they can be broadcast.
    val ipRulesInDriver: Array[(Long, Long, String, String)] = ipLines.map(line => {
      val fields = line.split("[|]")
      val startNum = fields(2).toLong
      val endNum = fields(3).toLong
      val province = fields(6)
      val city = fields(7)
      (startNum, endNum, province, city)
    }).collect()

    // 3. Broadcast the rule table once so every Executor gets a read-only copy
    //    instead of shipping it with each task.
    val broadcastRefInDriver: Broadcast[Array[(Long, Long, String, String)]] = spark.sparkContext.broadcast(ipRulesInDriver)

    // NOTE(review): this view is registered but never queried below — dead code.
    // Kept for backward compatibility (the session still exposes v_info); confirm
    // whether it can be removed.
    val df = spark.read
      .option("header", "true")
      .csv("data/address.txt")

    df.createTempView("v_info")

    // Register a UDF mapping an IP string to its province via binary search
    // over the broadcast rule table. Runs on Executors, hence `.value` access.
    spark.udf.register("ip2Location", (ip: String) => {
      val ipRulesInExecutor: Array[(Long, Long, String, String)] = broadcastRefInDriver.value
      val ipNum = IpUtils.ip2Long(ip)
      val index = IpUtils.binarySearch(ipRulesInExecutor, ipNum)
      // Expression-oriented: "未知" ("unknown") when no range contains the IP.
      if (index >= 0) ipRulesInExecutor(index)._3 else "未知"
    })

    // Access log: pipe-delimited, IP address in the second column (_c1).
    spark.read.option("delimiter", "|")
      .csv("data/ipaccess.log")
      .select("_c1")
      .toDF("ip")
      .createTempView("v_access")

    spark.sql(
      """
        |select ip, ip2Location(ip) province from v_access
        |""".stripMargin)
      .show()

    // Release cluster resources; the original code leaked the session.
    spark.stop()
  }

}
