package cn.lagou.sparksql

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}
import org.apache.spark.sql.types._

// One web-log record reduced to just its client IP (dotted-quad string, column 1 of http.log).
case class LogIp(ip:String)
// One row of the IP-range table (ip.dat): dotted-quad range bounds, their numeric
// (Long) equivalents used for the range comparison, and the city for that range.
case class IpInfo(startIp:String, endIp:String, startIpNum:Long, endIpNum:Long, city:String)
object FindIp {

  /** Counts web-log hits per city by range-joining log IPs against an IP table.
    *
    * Reads `data/http.log` (pipe-delimited, client IP in column 1) and
    * `data/ip.dat` (pipe-delimited; columns 0/1 are the range bounds as
    * dotted-quad strings, columns 2/3 the same bounds as Longs, column 7 the
    * city), registers both as temp views, and runs a Spark SQL range join:
    * a log IP belongs to the city whose [startipnum, endipnum] interval
    * contains its numeric value. Result is printed with `show`.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("FindIp")
      .master("local[*]")
      .getOrCreate()
    val sc = spark.sparkContext
    sc.setLogLevel("warn")

    import spark.implicits._

    // Load both files and expose them as temp views for the SQL below.
    val logIpRDD: RDD[LogIp] = sc.textFile("data/http.log").map(f => LogIp(f.split("\\|")(1)))
    val logIpDS: Dataset[LogIp] = logIpRDD.toDS()

    val infoRDD: RDD[Array[String]] = sc.textFile("data/ip.dat").map(_.split("\\|"))
    val ipInfoRDD: RDD[IpInfo] = infoRDD.map(f => IpInfo(f(0), f(1), f(2).toLong, f(3).toLong, f(7)))
    val ipInfo: Dataset[IpInfo] = ipInfoRDD.toDS()

    logIpDS.createOrReplaceTempView("logip")
    ipInfo.createOrReplaceTempView("ipinfo")

    // Convert a dotted-quad IP string to its numeric Long value.
    // foldLeft replaces the original var+index loop: for each octet,
    // shift the accumulator left 8 bits and OR in the octet.
    def ip2Long(ip: String): Long =
      ip.split("\\.").foldLeft(0L)((acc, octet) => (acc << 8) | octet.toLong)

    // Register the conversion so it is callable from SQL as ip2long(...).
    spark.udf.register("ip2Long", ip2Long _)

    // Range join (theta join, so written as a cross join + WHERE filter):
    // each log IP matched to the city whose numeric interval contains it.
    spark.sql(
      """
        |select city, count(*) as cnt
        |from (select *, ip2long(ip) as ipnum from logip) t1,
        |ipinfo t2
        |where t1.ipnum >= t2.startipnum and t1.ipnum <= t2.endipnum
        |group by city
        |""".stripMargin).show

    spark.close()
  }
}
