package kk.learn.spark.work._1

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * <p>
 * Spark job that converts IPv4 addresses to numeric form, range-joins the
 * request IPs from an HTTP access log against an IP-to-city lookup table,
 * and prints the visit count per city.
 * </p>
 *
 * @author KK
 * @since 2021-03-05
 */
object IpArea {
  /** Converts a dotted-decimal IPv4 string (e.g. "1.2.3.4") to its unsigned
   *  32-bit numeric value, returned as a Long so the full [0, 2^32) range fits.
   *
   *  @param ipStr an IPv4 address in dotted-decimal notation
   *  @return the numeric value of the address
   *  @throws IllegalArgumentException if the string does not have exactly four
   *          octets or an octet is outside [0, 255]
   *  @throws NumberFormatException if an octet is not numeric
   */
  def ipStr2Long(ipStr: String): Long = {
    val parts = ipStr.split("\\.")
    require(parts.length == 4, s"Invalid IPv4 address: '$ipStr'")
    // Fold each octet into the accumulator: acc << 8 | octet is equivalent to
    // the positional 256^3..256^0 weighting, without repeated multiplication.
    parts.foldLeft(0L) { (acc, part) =>
      val octet = part.toInt
      require(octet >= 0 && octet <= 255, s"Invalid octet '$part' in IPv4 address: '$ipStr'")
      (acc << 8) | octet
    }
  }

  def main(args: Array[String]): Unit = {
    // Input paths may be overridden on the command line:
    //   args(0) = IP range file, args(1) = HTTP access log.
    // Defaults preserve the original hard-coded locations.
    val ipDatPath   = args.lift(0).getOrElse("code/data/ip.dat")
    val httpLogPath = args.lift(1).getOrElse("code/data/http.log")

    val spark = SparkSession
      .builder()
      .appName("IpArea")
      .master("local[*]")
      .getOrCreate()
    spark.sparkContext.setLogLevel("warn")

    // Register the converter as a SQL UDF so selectExpr can turn dotted IPs
    // into comparable Long values.
    spark.udf.register("ipStr2Long", ipStr2Long _)

    // IP range table: each row carries a [start, end] address range (columns 0
    // and 1, dotted decimal) and the owning city (column 7).
    spark.read.format("csv")
      .option("sep", "|")
      .load(ipDatPath)
      .selectExpr("ipStr2Long(_c0) ipStart", "ipStr2Long(_c1) ipEnd", "_c7 city")
      .createOrReplaceTempView("ip_table")

    // Access log: only the client IP (column 1) is needed, converted to Long
    // so it can be range-compared against ip_table.
    spark.read.format("csv")
      .option("sep", "|")
      .load(httpLogPath)
      .selectExpr("ipStr2Long(_c1) ip")
      .createOrReplaceTempView("http_table")

    // Count visits per city by range-joining each request IP into the
    // [ipStart, ipEnd] interval it falls inside.
    spark.sql(
      """
        |select i.city, count(h.ip) visitCount
        |from http_table h
        |left join ip_table i
        |on h.ip >= i.ipStart and h.ip <= i.ipEnd
        |group by i.city
        |""".stripMargin).show()

    spark.close()
  }
}
