package cn.echcz.spark.examples

import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Counts the number of visits per geographic location (location is resolved from the IP).
  * Required input data: data/ip2Location.txt (IP range to location mapping) and
  * data/20090121.http.format (pre-formatted access log).
  *
  */
object LocationVisitCount {
  /**
    * Entry point. Expects two arguments:
    *   args(0) — path to ip2Location.txt (pipe-delimited; fields 2/3 are the range
    *             start/end IPs, field 6 is the location name)
    *   args(1) — path to the formatted access log (pipe-delimited; field 1 is the client IP)
    */
  def main(args: Array[String]): Unit = {
    // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
    require(args.length >= 2,
      "Usage: LocationVisitCount <ip2Location.txt path> <access-log path>")

    val conf: SparkConf = new SparkConf().setAppName("LocationVisitCount")
    val sc = new SparkContext(conf)
    try {
      // Build the ((rangeStartIp, rangeEndIp), location) mapping from the first input file.
      val i2LInput: RDD[String] = sc.textFile(args(0))
      val ip2LocationRdd: RDD[((Long, Long), String)] = i2LInput.map(_.split("\\|")).
        map(line => ((Utils.ip2Long(line(2)), Utils.ip2Long(line(3))), line(6)))

      // The mapping table is small; collect it to the driver and broadcast it so every
      // executor gets one read-only copy instead of shipping it with each task.
      val ip2Location: Array[((Long, Long), String)] = ip2LocationRdd.collect()
      val ip2LocationBroad: Broadcast[Array[((Long, Long), String)]] = sc.broadcast(ip2Location)

      // Read the access log and extract the numeric form of each visiting IP.
      val logInput: RDD[String] = sc.textFile(args(1))
      val ipRdd: RDD[Long] = logInput.map(_.split("\\|")).map(line => Utils.ip2Long(line(1)))

      // Resolve each IP to its location. mapPartitions dereferences the broadcast
      // once per partition rather than once per record.
      val location: RDD[(String, Int)] = ipRdd.mapPartitions(iter => {
        val broad: Array[((Long, Long), String)] = ip2LocationBroad.value
        iter.map(ip => (Utils.searchRange2(ip, broad), 1))
      })

      // Sum the counts per location.
      val locationCount: RDD[(String, Int)] = location.reduceByKey(_ + _)

      // BUG FIX: foreach on an RDD runs on the executors, so println output never
      // reaches the driver console in cluster mode. The result set is at most one
      // entry per distinct location (bounded by the broadcast table), so it is safe
      // to collect it to the driver before printing.
      locationCount.collect().foreach(x => println(s"${x._1} -> ${x._2}"))
    } finally {
      // Always release the SparkContext, even if an action above fails.
      sc.stop()
    }
  }
}
