package com.lagou

import org.apache.log4j.{Level, Logger}
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

object WorkOne {
  def main(args: Array[String]): Unit = {
    // 1. Create the SparkContext (quiet down Spark's own logging first).
    Logger.getLogger("org").setLevel(Level.WARN)
    val spark = SparkSession.builder()
      .appName(s"${this.getClass.getCanonicalName}")
      .master("local[*]")
      .getOrCreate()
    val sc = spark.sparkContext

    // 2. Build the RDDs.

    // Access-log RDD: field 1 of each '|'-separated line is the client IP.
    val logRDD: RDD[String] = sc.textFile("data/source1/http.log").map {
      line =>
        val fields = line.split("\\|")
        fields(1)
    }

    // IP-region table: (rangeStart, rangeEnd, city). Collected to the driver and
    // sorted by range start so executors can binary-search it (ranges in ip.dat
    // are non-overlapping). Sorting once here replaces the per-record
    // `toArray` + linear `filter` scan the naive version would do.
    val ipRanges: Array[(Long, Long, String)] = sc.textFile("data/source1/ip.dat").map {
      line =>
        val fields = line.split("\\|")
        (ip2Long(fields(0)), ip2Long(fields(1)), fields(7))
    }.collect().sortBy(_._1)

    // Broadcast the small, sorted lookup table once instead of shipping it
    // with every task closure.
    val ipBC: Broadcast[Array[(Long, Long, String)]] = sc.broadcast(ipRanges)

    // Map-side non-equi join: O(log n) binary search per record instead of a
    // full scan; ip2Long is computed once per record. IPs outside every range
    // no longer crash the job (the old `.filter(...).head` threw
    // NoSuchElementException) — they are labeled "Unknown".
    val ipCityRDD = logRDD.map {
      ip =>
        val ipLong = ip2Long(ip)
        val city = findCity(ipBC.value, ipLong).getOrElse("Unknown")
        (city, ip)
    }

    // 3. Output the results.
    println("(ip.dat的城市 ，  http.log 中的ip) 收集信息如下(取前十五条查看)：")
    ipCityRDD.take(15).foreach(println)
    println("各城市的总访问量如下：")
    ipCityRDD.map { case (city, _) => (city, 1) }.reduceByKey(_ + _).take(15).foreach(println)

    // 4. Shut down.
    sc.stop()
    spark.close()
  }

  /**
   * Binary-search `ranges` (sorted by range start, assumed non-overlapping)
   * for the range containing `ip`.
   *
   * @param ranges sorted array of (start, end, city) tuples, inclusive bounds
   * @param ip     the numeric IP to locate
   * @return Some(city) if a range contains `ip`, None otherwise
   */
  def findCity(ranges: Array[(Long, Long, String)], ip: Long): Option[String] =
    searchRange(ranges, ip, 0, ranges.length - 1)

  // Tail-recursive binary search over the closed interval [lo, hi].
  @scala.annotation.tailrec
  private def searchRange(ranges: Array[(Long, Long, String)], ip: Long, lo: Int, hi: Int): Option[String] =
    if (lo > hi) None
    else {
      val mid = (lo + hi) >>> 1 // unsigned shift avoids (lo + hi) overflow
      val (start, end, city) = ranges(mid)
      if (ip < start) searchRange(ranges, ip, lo, mid - 1)
      else if (ip > end) searchRange(ranges, ip, mid + 1, hi)
      else Some(city)
    }

  /**
   * Convert a dotted-quad IPv4 string (e.g. "192.168.0.1") to its unsigned
   * 32-bit numeric value, widened to Long so values above 2^31 stay positive.
   *
   * @param ip dotted-quad IPv4 address; assumes four valid numeric octets
   * @return the address as a Long in [0, 2^32)
   */
  def ip2Long(ip: String): Long = {
    val strings: Array[String] = ip.split("\\.")
    strings(0).toInt * 256 * 256 * 256L + strings(1).toInt * 256 * 256L + strings(2).toInt * 256L + strings(3).toInt
  }
}
