package cn.lagou.spark.ipArea

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

/**
 * 1. Find the region each request IP belongs to.
 */

case class Ip(startIp: String, endIp: String, address: String)

object FindIPArea {

  /**
   * Entry point: reads request IPs from data/http.log, resolves each to a
   * region via the rule table in data/ip.dat, then prints per-region hit
   * counts in descending order.
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .appName(this.getClass.getCanonicalName)
      .master("local[*]")
      .getOrCreate()
    val sc: SparkContext = spark.sparkContext
    // Spark upper-cases the level internally, so "warn" is accepted as WARN.
    sc.setLogLevel("warn")

    // Access-log format: '|'-separated fields; field 1 is the client IP.
    val ipLogsRDD: RDD[String] = sc.textFile("data/http.log")
      .map(_.split("\\|")(1))

    // Rule-file format: '|'-separated; fields 0/1 are the dotted-quad start/end
    // of a range, field 7 is the region name.
    // NOTE(review): binarySearch below assumes the rules in data/ip.dat are
    // sorted by start IP and non-overlapping — confirm against the data file.
    val ipInfoRDD: RDD[Ip] = sc.textFile("data/ip.dat").map {
      line: String => {
        val strSplit: Array[String] = line.split("\\|")
        Ip(strSplit(0), strSplit(1), strSplit(7))
      }
    }

    // The rule table is small enough to collect; broadcast it once (already
    // converted to numeric ranges) instead of shipping a copy with every task.
    val brIPInfo: Broadcast[Array[(Long, Long, String)]] =
      sc.broadcast(ipInfoRDD.map(
        x => (ip2Long(x.startIp), ip2Long(x.endIp), x.address)
      ).collect())

    // Resolve each request IP to a region ("NULL" when no rule matches),
    // count hits per region and print in descending order of traffic.
    ipLogsRDD
      .map(x => {
        val index: Int = binarySearch(brIPInfo.value, ip2Long(x))
        if (index != -1)
          brIPInfo.value(index)._3
        else
          "NULL"
      }).map(x => (x, 1))
      .reduceByKey(_ + _)
      .sortBy(_._2, ascending = false)
      .map(x => s"${x._1}, 总访问量：${x._2}")
      .collect
      .foreach(println)

  }

  /**
   * Converts a dotted-quad IPv4 string (e.g. "1.2.3.4") to its numeric value
   * by shifting each octet into place.
   *
   * Uses `foldLeft` instead of `fold`: the shift-or accumulation is not
   * associative, and `fold` only guarantees a deterministic result for
   * associative operations.
   */
  def ip2Long(ip: String): Long = {
    ip.split("\\.")
      .map(_.toLong)
      .foldLeft(0L) { (acc, octet) =>
        acc << 8 | octet
      }
  }

  /**
   * Binary search over inclusive [start, end] ranges.
   *
   * @param lines rules as (rangeStart, rangeEnd, region), sorted by rangeStart
   *              and non-overlapping (precondition — not checked here)
   * @param ip    numeric IP to look up
   * @return index of the range containing `ip`, or -1 when none does
   */
  def binarySearch(lines: Array[(Long, Long, String)], ip: Long): Int = {
    var low = 0
    var high: Int = lines.length - 1
    while (low <= high) {
      // Overflow-safe midpoint (avoids (low + high) exceeding Int.MaxValue).
      val middle: Int = low + (high - low) / 2
      if ((ip >= lines(middle)._1) && (ip <= lines(middle)._2))
        return middle
      if (ip < lines(middle)._1)
        high = middle - 1
      else {
        low = middle + 1
      }
    }
    -1
  }

}
