package com.test.cn.spark.core

import org.apache.spark.sql.{DataFrame, SparkSession}

object IpQuestion {

  /** Converts a dotted-quad IPv4 string (e.g. "192.168.0.1") to its numeric
    * Long value by folding each octet into an accumulator with shift-and-or.
    *
    * NOTE(review): assumes a well-formed IPv4 string — a non-numeric octet
    * will throw NumberFormatException; confirm the log data is clean.
    *
    * @param ipStr dotted-decimal IPv4 address
    * @return the address as an unsigned 32-bit value stored in a Long
    */
  private def ip2long(ipStr: String): Long =
    ipStr.split("\\.").foldLeft(0L)((acc, octet) => (acc << 8) | octet.toLong)

  /** Entry point: joins an HTTP access log against an IP-range lookup table
    * and prints the number of distinct client IPs seen per city.
    */
  def main(args: Array[String]): Unit = {
    // Create the SparkSession in local mode, using all available cores.
    val spark = SparkSession
      .builder()
      .appName("ipQuestion")
      .master("local[*]")
      .getOrCreate()

    // Register the IP-range lookup table.
    // Columns: start/end IP (dotted and numeric forms), state, country,
    // province, city, plus unused trailing columns from the source file.
    val ipSchema = "startIp string," +
      "endIp string," +
      "startIpNum long," +
      "endIpNum long," +
      "stateStr string," +
      "country string," +
      "province string," +
      "city string," +
      "xxx_1 string,xxx_2 string,xxx_3 string,xxx_4 string,xxx_5 string,xxx_6 string,xxx_7 string"
    spark.read
      .option("delimiter", "|")
      .schema(ipSchema)
      .csv("spark_scala_home_work/src/data/ip.dat")
      .createTempView("ip_table")

    // Register the HTTP access log table.
    // Columns: timestamp, client IP, URL, request data, browser info.
    val httpSchema = "timelong string,ipStr string,url string,urlData string,browserInfo string"
    spark.read
      .option("delimiter", "|")
      .schema(httpSchema)
      .csv("spark_scala_home_work/src/data/http.log")
      .createTempView("http_info")

    // Expose the IP-to-Long conversion to Spark SQL as a UDF.
    spark.udf.register("ip2long", ip2long _)

    // Count distinct client IPs per city: deduplicate the numeric IPs from
    // the log, then range-join each against [startIpNum, endIpNum] in the
    // lookup table to resolve its city.
    spark.sql(
      """
        |select city,count(1) as total from (
        |   select n.country,n.province,n.city from
        |     (select distinct ip2long(ipStr) as ipNum from http_info) as m
        |       join ip_table as n
        |      on ( m.ipNum >= n.startIpNum and n.endIpNum >= m.ipNum )
        |) as t group by city
      """.stripMargin
    ).show

    // Release the SparkSession and its underlying SparkContext.
    spark.stop()
  }

}
