package com.txl.cn.spark06

import java.util.Properties

import com.txl.cn.spark03.LocalIP
import org.apache.spark.sql.{DataFrame, Dataset, SaveMode, SparkSession}

/**
  * Created by txl on 2018/1/3.
  */
// IP address geolocation: resolve each access-log IP to its province using a Spark SQL multi-table (range) join
object DataSetDemo2 {

  /**
    * IP geolocation demo: parses an IP-range rules file and an access log,
    * range-joins them with Spark SQL (start <= ipNum <= end), and counts
    * hits per province.
    *
    * Uses an explicit `main` instead of `extends App` to avoid the App
    * trait's delayed-initialization pitfalls.
    */
  def main(args: Array[String]): Unit = {
    val session = SparkSession.builder()
      .master("local")
      .appName("DataSetDemo2")
      .getOrCreate()

    try {
      import session.implicits._

      // Rules file is pipe-delimited: fields 2/3 are the numeric start/end
      // of an IP range, field 6 is the province name.
      val ipRules: Dataset[String] = session.read.textFile("data/ip.txt")
      val ipRulesDF = ipRules.map { line =>
        val fields = line.split("[|]")
        (fields(2).toLong, fields(3).toLong, fields(6))
      }.toDF("start", "end", "province")

      // Access log is pipe-delimited: field 1 is the client IP, converted
      // to its numeric form so it can be range-joined against the rules.
      val logs = session.read.textFile("data/access.log")
      val ipDF = logs.map { line =>
        LocalIP.ip2Long(line.split("[|]")(1))
      }.toDF("ipNum")

      ipRulesDF.createTempView("ipdata")
      ipDF.createTempView("p")

      // Range join: an IP belongs to a rule when start <= ipNum <= end.
      // NOTE: fixed the inconsistent `ipnum`/`ipNum` casing in the original
      // predicate (it only worked because Spark SQL is case-insensitive).
      val res: DataFrame = session.sql(
        """select province, count(*) counts
          |from ipdata inner join p
          |on (p.ipNum >= ipdata.start and p.ipNum <= ipdata.end)
          |group by province""".stripMargin)

      // Spark queries are lazy: without an action the job computes nothing.
      // Materialize and print the per-province counts.
      res.show()

      //res.write.mode("overwrite").json("data/json")
    } finally {
      // Always release Spark resources, even if the job fails.
      session.stop()
    }
  }
}
