import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Homework1: counts HTTP log records per region by converting each request's
 * client IP to a number and matching it against a broadcast IP-range table.
 *
 * date: 2021/7/5 19:41
 *
 * @author Hesion
 * @since JDK 1.8
 */
object Homework1 {

  /**
   * Converts a dotted-quad IPv4 string (e.g. "1.2.3.4") into its unsigned
   * 32-bit numeric value, carried in a Long to avoid sign issues.
   * Each octet shifts the accumulator left 8 bits and ORs itself in.
   * Note: foldLeft (not fold) — the accumulation is order-dependent.
   */
  private def ip2Long(ip: String): Long =
    ip.split("\\.").foldLeft(0L)((acc, octet) => acc << 8 | octet.toLong)

  /**
   * Binary-searches `ranges` (sorted ascending by range start; assumed
   * non-overlapping) for the (start, end, region) triple containing `ip`.
   *
   * @param ip     numeric IPv4 address to look up
   * @param ranges sorted array of (startIp, endIp, regionName)
   * @return the matching region name, or "Unknown" when no range contains `ip`
   */
  private def findRegion(ip: Long, ranges: Array[(Long, Long, String)]): String = {
    var lo = 0
    var hi = ranges.length - 1
    while (lo <= hi) {
      // Unsigned shift keeps the midpoint correct even if lo + hi overflows.
      val mid = (lo + hi) >>> 1
      val (start, end, region) = ranges(mid)
      if (ip < start) hi = mid - 1
      else if (ip > end) lo = mid + 1
      else return region
    }
    "Unknown"
  }

  def main(args: Array[String]): Unit = {
    // Boilerplate: build a local SparkSession.
    val conf = new SparkConf().setAppName(this.getClass.getCanonicalName).setMaster("local[*]")
    val spark: SparkSession = SparkSession.builder()
      .config(conf)
      .getOrCreate()
    // Reduce log noise.
    spark.sparkContext.setLogLevel("WARN")
    import spark.implicits._

    // Read the access log; column 1 of the '|'-delimited file is the client IP.
    spark.read
      .option("delimiter", "|")
      .csv("data/http.log")
      .map(row => row.getString(1))
      .createOrReplaceTempView("t1")

    // The IP-range reference table is small, so collect it to the driver
    // and ship it to every executor as a broadcast variable.
    val ipData: Array[(Long, Long, String)] = spark.read
      .option("delimiter", "|")
      .csv("data/ip.dat")
      .map(row => (row.getString(2).toLong, row.getString(3).toLong, row.getString(6)))
      .collect()

    // Sort by range start so the lookup UDF can binary-search.
    val ipBC: Broadcast[Array[(Long, Long, String)]] =
      spark.sparkContext.broadcast(ipData.sortBy(_._1))

    // Register the UDFs used in the SQL below. The getCityName lambda captures
    // only the broadcast handle; each executor reads its local copy.
    spark.udf.register("ip2Long", ip2Long _)
    spark.udf.register("getCityName", (ip: Long) => findRegion(ip, ipBC.value))

    // Count requests per region. (Alias typo fixed: "provice" -> "province".)
    spark.sql(
      """
        |select getCityName(ip2Long(value)) as province, count(1) as no
        | from t1
        | group by getCityName(ip2Long(value))
        |""".stripMargin).show()

    // Stop the underlying SparkContext.
    spark.close()
  }
}
