package part01

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.control.Breaks.{break, breakable}

object IP {

  /** Converts a dotted-quad IPv4 string (e.g. "122.228.96.111") to its numeric value.
    *
    * Each octet is weighted by 256^(3-i). NOTE: the previous implementation used
    * 255 as the base, which makes the mapping non-monotonic and non-injective
    * (e.g. 0.255.255.255 compared greater than 1.0.0.0), so range checks could
    * return wrong results. Base 256 is the correct positional encoding.
    */
  private def ipToLong(ip: String): Long =
    ip.split("\\.").foldLeft(0L)((acc, octet) => acc * 256L + octet.toLong)

  /** Returns true when `IP` lies inside the inclusive range [startIP, endIP].
    *
    * @param startIP lower bound of the range, dotted-quad form
    * @param endIP   upper bound of the range, dotted-quad form
    * @param IP      address to test, dotted-quad form
    */
  def compare(startIP: String, endIP: String, IP: String): Boolean = {
    val ip = ipToLong(IP)
    ip >= ipToLong(startIP) && ip <= ipToLong(endIP)
  }

  def main(args: Array[String]): Unit = {
    // `.init` drops the trailing '$' from the Scala object's class name
    val conf = new SparkConf()
      .setAppName(this.getClass.getCanonicalName.init)
      .setMaster("local[*]")
    val sc = new SparkContext(conf)

    // http.log sample layout ('|'-delimited): timestamp | IP | URL | bytes | user-agent
    val http: RDD[String] = sc.textFile("data/http.log")
    val ip: RDD[String] = sc.textFile("data/ip.dat")

    // ip.dat ('|'-delimited): fields 0/1 are the range bounds; field 7 (falling
    // back to field 6 when empty) is the city name. Group all ranges per city.
    val ipRange = ip.map(_.split("\\|"))
      .map(x => (if (x(7).isEmpty) x(6) else x(7), (x(0), x(1))))
      .groupByKey()
    // Broadcast the city -> ranges map so each executor holds one read-only copy
    // instead of shipping it with every task.
    val ipRangeBC = sc.broadcast(ipRange.collectAsMap())

    // Requirement: resolve each log line's IP to a city name
    // (e.g. 122.228.96.111 -> 温州). Unmatched IPs are tagged "no city".
    val ipCity = http.map(_.split("\\|"))
      .map { fields =>
        val addr = fields(1)
        // collectFirst + exists short-circuit at the first matching range,
        // replacing the earlier breakable/break-inside-foreach construct.
        val city = ipRangeBC.value
          .collectFirst {
            case (name, ranges) if ranges.exists { case (s, e) => compare(s, e, addr) } => name
          }
          .getOrElse("no city")
        (addr, city, 1)
      }

    // Total visits per city, highest first.
    ipCity.map(t => (t._2, t._3)) // reshape to (city, 1)
      .reduceByKey(_ + _)          // sum the counts
      .sortBy(_._2, ascending = false)
      .collect().foreach(println)

    sc.stop()
  }
}
