package homework1

import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * 思路是将IP地址转换为方便计算的数字，然后将ip.dat读取的RDD展开，然后和HTTP做左外连接
 */
/**
 * Resolves the client IP of each HTTP log record to a region (from ip.dat)
 * and reports the top-10 regions by request count.
 *
 * Approach: convert dotted-quad IPs to order-preserving Longs, expand every
 * ip.dat range into its individual values, then left-outer-join with the
 * HTTP records.
 *
 * NOTE(review): ip.dat is small; broadcasting the sorted ranges and doing a
 * binary search per record would avoid the memory-heavy range expansion in
 * `ipsRDD` — kept as-is here to preserve the original structure.
 */
object HttpToAddress {
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setMaster("local[*]").setAppName("test")
    val sc = new SparkContext(conf)

    // http.log is pipe-separated; field 1 is the client IP.
    // Each record contributes a count of 1 keyed by the numeric IP.
    val httpRDD: RDD[(Long, Int)] = sc.textFile("data/http.log").map { line =>
      val fields = line.split("\\|")
      (exchangeIp(fields(1)), 1)
    }

    // ip.dat is pipe-separated; fields 0 and 1 are the range bounds
    // (dotted-quad IPs), field 7 is the region name.
    val ipRDD: RDD[(String, (Long, Long))] = sc.textFile("data/ip.dat").map { line =>
      val fields = line.split("\\|")
      (fields(7), (exchangeIp(fields(0)), exchangeIp(fields(1))))
    }

    // Expand each (region, [lo, hi]) range into one (ip, region) pair per
    // value so the HTTP records can be joined key-to-key.
    val ipsRDD: RDD[(Long, String)] = ipRDD
      .flatMapValues { case (lo, hi) => lo to hi }
      .map { case (region, ip) => (ip, region) }

    println("-----------做关联------------")
    // Left outer join keeps HTTP records whose IP falls outside every known
    // range; those are bucketed under the literal "null" region below.
    val joined: RDD[(Long, (Int, Option[String]))] = httpRDD.leftOuterJoin(ipsRDD)
    val result: RDD[(String, Int)] = joined.map {
      case (_, (count, region)) => (region.getOrElse("null"), count)
    }

    println("-----------统计------------")
    // Region cardinality is small, so collecting the counts to the driver
    // with countByKey is acceptable here.
    val countResult: collection.Map[String, Long] = result.countByKey()

    println("-----------获取最后结果------------")
    // FIX: the original called take(10) on an unordered Map, which returns
    // 10 arbitrary entries. Sort by count descending to get the actual
    // top-10 regions, and print them in the "region<TAB>count" format the
    // original left commented out.
    val top10: Seq[(String, Long)] = countResult.toSeq.sortBy(-_._2).take(10)
    top10.foreach { case (region, count) => println(s"$region\t$count") }

    sc.stop()
  }

  /**
   * Converts a dotted-quad IP string to a Long by zero-padding each octet to
   * three decimal digits and concatenating them, e.g. "1.2.3.4" -> 1002003004L.
   *
   * The mapping is monotonic (lexicographic order of the padded string equals
   * numeric order of the IP), so range membership tests on the resulting
   * numbers agree with tests on the original IPs — which is all the join
   * above relies on.
   *
   * @param ip dotted-quad IPv4 address, e.g. "192.168.0.1"
   * @return   order-preserving numeric encoding of the address
   */
  def exchangeIp(ip: String): Long =
    ip.split("\\.").map(octet => f"${octet.toInt}%03d").mkString.toLong
}