package com.lagou.no1

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

object No1 {
    /**
     * Entry point: joins HTTP access logs against an IP-to-city range table
     * and prints the visit count per city.
     *
     * Expects two pipe-delimited, headerless input files:
     *   - data/http.log: timestamp|ip|url|data|browser(3 columns)
     *   - data/ip.dat:   IP range table; columns 0 and 1 are the numeric
     *                    range bounds, column 7 is the city name
     */
    def main(args: Array[String]): Unit = {
        // Quiet Spark's verbose INFO logging.
        Logger.getLogger("org").setLevel(Level.WARN)

        // Build a local SparkSession; appName is derived from this class.
        val spark = SparkSession.builder()
                .appName(s"${this.getClass.getCanonicalName}")
                .master("local[*]")
                .getOrCreate()
        import spark.implicits._

        // Read http.log into a DataFrame with explicit column names.
        // NOTE: DataFrameReader option keys are case-insensitive, so
        // "inferschema" is treated the same as "inferSchema".
        // ("broswer" typo is kept intentionally: the SQL below references it.)
        val httpDf = spark.read
                .option("header", "false")
                .option("inferschema", "true")
                .option("delimiter", "|")
                .csv("data/http.log").toDF("timestamp", "ip", "url", "data", "broswer1", "broswer2", "broswer3")
        // Expose as a temp view for the SQL query below.
        httpDf.createOrReplaceTempView("http")

        // Read ip.dat; keep only the range bounds (_c0, _c1) and city (_c7).
        val ipDf = spark.read
                .option("header", "false")
                .option("inferschema", "true")
                .option("delimiter", "|")
                .csv("data/ip.dat").select("_c0", "_c1", "_c7").toDF("startIp", "endIp", "city")
        ipDf.createOrReplaceTempView("ip")

        // Register the dotted-quad -> Long converter so SQL can range-join
        // on numeric IP values.
        // NOTE(review): a null/malformed ip column would make the UDF throw;
        // assumes the input files contain well-formed IPv4 strings — confirm.
        spark.udf.register("ip2Long", ip2Long _)

        // Range-join each request's numeric IP into [startIp, endIp] and
        // count visits per city. The nvl() calls tolerate missing browser
        // fragments when reassembling the user-agent string.
        spark.sql(
            """
              |select b.city,count(1) as visitNum from
              |(select timestamp,
              |        ip,
              |        ip2Long(ip)  as longIp,
              |        url,
              |        data,
              |        concat(broswer1,nvl(broswer2,''),nvl(broswer3,'')) broswer
              | from http) a
              | left join
              | (select  ip2Long(startIp)  as longStartIp,
              |          ip2Long(endIp)  as longEndIp,
              |          city
              | from ip) b
              | on a.longIp >= b.longStartIp
              |    and a.longIp <= b.longEndIp
              | group by b.city
              |""".stripMargin).show(5000, false)

        // Release Spark resources.
        spark.close()
    }

    /**
     * Converts a dotted-decimal IPv4 string (e.g. "192.168.0.1") to its
     * unsigned 32-bit numeric value held in a Long, by folding each octet
     * into the accumulator: acc = (acc << 8) | octet.
     *
     * Equivalent to the original var-based loop (in Scala, `<<` binds
     * tighter than `|`), but expressed without mutation.
     *
     * @param ip dotted-decimal IPv4 address with numeric octets
     * @return the numeric value of the address
     */
    def ip2Long(ip: String): Long =
        ip.split("[.]").foldLeft(0L)((acc, octet) => (acc << 8) | octet.toLong)
}
