package cn.bigdata.sparkcore.job

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Requirement: translate each IP in http.log to a region (e.g. 122.228.96.111 -> Wenzhou)
 * and count the total number of visits per city.
 *
 * ip.dat layout (pipe-delimited): field 2 = numeric range start, field 3 = numeric
 * range end, field 7 = city name. http.log is pipe-delimited with the client IP in field 1.
 */
object Case1 {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName(this.getClass.getName.init).setMaster("local[*]")
    val sc = new SparkContext(conf)
    sc.setLogLevel("warn")

    // Load the IP->city ranges and sort them by range start so that each lookup can
    // binary-search instead of linearly scanning every range per log record.
    // (collectAsMap keyed by (start, end) would also silently drop duplicate ranges.)
    val ipRanges: Array[(Long, Long, String)] = sc.textFile("data/ip.dat")
      .map { line =>
        val arr = line.split("\\|")
        (arr(2).toLong, arr(3).toLong, arr(7))
      }
      .collect()
      .sortBy(_._1)

    // Ship the (small) sorted range table to every executor once.
    val ipRangesBC = sc.broadcast(ipRanges)

    // Parse the nginx log: field 1 is the client IP; seed each hit with count 1,
    // then pre-aggregate per IP so the city lookup runs once per distinct IP.
    val httpLogReduce: RDD[(String, Int)] = sc.textFile("data/http.log").map { line =>
      val arr = line.split("\\|")
      (arr(1), 1)
    }.reduceByKey(_ + _)

    // Resolve each IP to its city (O(log n) per IP), sum per city, print ascending by count.
    httpLogReduce.map { case (ip, count) =>
      val city = findCity(ipRangesBC.value, ipToLong(ip)).getOrElse {
        println(ip) // surface unmatched IPs for debugging, as before
        "未知"
      }
      (city, count)
    }.reduceByKey(_ + _).sortBy(_._2).collect().foreach(println)

    sc.stop()
  }

  /**
   * Binary-search `ranges` (sorted by range start, assumed non-overlapping) for the
   * range containing `ipNum`.
   *
   * @param ranges sorted array of (rangeStart, rangeEnd, city), both ends inclusive
   * @param ipNum  the IP converted to its numeric form
   * @return Some(city) if a range contains ipNum, otherwise None
   */
  private def findCity(ranges: Array[(Long, Long, String)], ipNum: Long): Option[String] = {
    var lo = 0
    var hi = ranges.length - 1
    var found: Option[String] = None
    while (lo <= hi && found.isEmpty) {
      val mid = lo + (hi - lo) / 2
      val (start, end, city) = ranges(mid)
      if (ipNum < start) hi = mid - 1
      else if (ipNum > end) lo = mid + 1
      else found = Some(city)
    }
    found
  }

  /**
   * Convert a dotted-quad IP address (e.g. "122.228.96.111") to its numeric form
   * by folding each octet into the accumulator: acc = (acc << 8) | octet.
   *
   * @param ip dotted-decimal IP string
   * @return the IP as a Long
   */
  def ipToLong(ip: String): Long =
    ip.split("[.]").foldLeft(0L)((acc, fragment) => (acc << 8) | fragment.toLong)
}
