package com.txl.cn.spark06

import java.util.Properties

import com.txl.cn.spark03.LocalIP
import org.apache.spark.sql.{Dataset, SaveMode, SparkSession}

/**
  * Created by txl on 2018/1/3.
  */
/**
  * Reads an IP-range rules file and an access-log file, resolves each log
  * entry's IP to a province via a broadcast lookup table + binary search,
  * aggregates access counts per province with Spark SQL, and appends the
  * result to a MySQL table.
  *
  * args(0) = path to the IP rules file ('|'-delimited: start/end numeric IP
  *           in fields 2/3, province in field 6)
  * args(1) = path to the access log ('|'-delimited: IP string in field 1)
  */
object DataSetBC {
  def main(args: Array[String]): Unit = {
    val session = SparkSession.builder()
      //.master("local[*]")
      //.appName("DataSetDemo2")
      .getOrCreate()
    import session.implicits._

    val ipRulesData: Dataset[String] = session.read.textFile(args(0))
    val logs = session.read.textFile(args(1))

    // Parse each rule line into (startIp, endIp, province).
    val ipRules2 = ipRulesData.map { t =>
      val ips = t.split("[|]")
      val start = ips(2).toLong
      val end = ips(3).toLong
      val province = ips(6)
      (start, end, province)
    }

    // Collect the (small) rules table to the driver and broadcast it so each
    // executor gets exactly one copy instead of one per task closure.
    val ipdata: Array[(Long, Long, String)] = ipRules2.collect()
    val ipBC = session.sparkContext.broadcast(ipdata)

    // Convert each log line's IP string to its numeric form.
    val ipDF = logs.map { t =>
      val ip: String = t.split("[|]")(1)
      LocalIP.ip2Long(ip)
    }.toDF("ipNum")

    ipDF.createTempView("access")

    session.udf.register("ip2Province", (ipnum: Long) => {
      // Read the broadcast INSIDE the UDF: capturing a driver-side local
      // (ipBC.value taken outside) would serialize the full array into every
      // task closure and defeat the broadcast. Only the Broadcast handle is
      // captured here; executors resolve the shared copy via .value.
      val rules = ipBC.value
      // Binary search for the range containing ipnum.
      val index: Int = LocalIP.search(ipnum, rules)
      // -1 means the IP falls outside every known range.
      if (index != -1) rules(index)._3 else "unknown"
    })

    val sql = session.sql("select ip2Province(ipnum) as province,count(*) sums from access group by province order by sums desc")
    //sql.show()

    // Append the per-province counts to MySQL.
    val url = "jdbc:mysql://192.168.16.8:3306/scott?characterEncoding=utf-8"
    // val url="jdbc:mysql://localhost/scott?characterEncoding=utf-8"
    val tname = "access_log"
    val p = new Properties()
    p.setProperty("user", "root")
    p.setProperty("password", "123456")
    p.setProperty("driver", "com.mysql.jdbc.Driver")
    sql.write.mode(SaveMode.Append).jdbc(url, tname, p)

    session.close()
  }

}
