package core_sql.day06_sql

import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/**
  * Created by zx on 2017/8/26.
  */
object IPLocation_SQL {

  /** Converts a dotted-quad IPv4 string (e.g. "1.2.3.4") into its unsigned
    * 32-bit numeric value, carried in a Long: each octet is shifted into the
    * accumulator from the left, so "1.0.0.0" becomes 16777216.
    */
  val ip2Long = (ip: String) => {
    ip.split("[.]").foldLeft(0L)((acc, octet) => (acc << 8L) | octet.toLong)
  }

  /** Entry point: joins access-log IPs against IP-range rules via Spark SQL
    * and prints the matched provinces.
    *
    * @param args args(0) = path to the IP rule file ("...|start|end|...|province|..."),
    *             args(1) = path to the access log file ("...|ip|...")
    */
  def main(args: Array[String]): Unit = {

    // Fail fast with a usage message instead of an opaque ArrayIndexOutOfBounds.
    require(args.length >= 2, "usage: IPLocation_SQL <ipRulePath> <accessLogPath>")

    val spark = SparkSession.builder()
      .appName("DataSourceDemo1")
      .master("local[*]")
      .getOrCreate()

    import spark.implicits._

    try {
      // Load the raw IP-rule lines.
      val ipLines: Dataset[String] = spark.read.textFile(args(0))
      // Parse each rule line into (range start, range end, province).
      val ruleDF: DataFrame = ipLines.map(line => {
        val fields = line.split("[|]")
        (fields(2).toLong, fields(3).toLong, fields(6))
      }).toDF("start_num", "end_num", "province")

      // Load the access log and keep only the client IP (second field).
      val logLine: Dataset[String] = spark.read.textFile(args(1))
      val ipDF: DataFrame = logLine.map(line => line.split("[|]")(1)).toDF("ip")

      ruleDF.createTempView("v_rules")
      ipDF.createTempView("v_logs")

      // Expose the ip-to-Long conversion to SQL under the name "ip2Long".
      spark.udf.register("ip2Long", ip2Long)

      // Convert each log IP exactly once in a subquery, then range-join it
      // against the rules. BETWEEN is inclusive on both ends, matching the
      // original `>= start_num AND <= end_num` predicate.
      val r = spark.sql(
        """SELECT province
          |FROM (SELECT ip2Long(ip) AS ip_num FROM v_logs) t
          |JOIN v_rules ON t.ip_num BETWEEN start_num AND end_num""".stripMargin)

      r.show(10)
    } finally {
      // Always release the SparkSession (threads, UI port, temp dirs),
      // even if reading or parsing fails.
      spark.stop()
    }
  }

}
