package spark.sql.practice

import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}
import spark.sql.utils.IpTransform

/**
 * @Author Jeremy Zheng
 * @Date 2021/3/29 17:29
 * @Version 1.0
 */
object IpCount {
  /**
   * Counts page views per province by joining access-log client IPs
   * (converted to their decimal form) against a table of province IP ranges.
   *
   * Usage: IpCount [accessLogPath] [ipRulesPath]
   * When no arguments are given, the original hard-coded demo paths are used,
   * so existing invocations keep working unchanged.
   */
  def main(args: Array[String]): Unit = {
    // Input paths are now overridable from the command line (generalization);
    // defaults preserve the previous behavior byte-for-byte.
    val accessPath: String = args.lift(0).getOrElse("D:\\qq文件\\1203368011\\access.log")
    val rulesPath: String  = args.lift(1).getOrElse("D:\\qq文件\\1203368011\\ip.txt")

    // Build the Spark environment.
    val spark: SparkSession = SparkSession.builder()
      .master("local")
      .appName("IpCountDemo1")
      .getOrCreate()
    import spark.implicits._

    try {
      // Read the raw pipe-delimited input files.
      val accDS: Dataset[String] = spark.read.textFile(accessPath)
      val ipDS: Dataset[String] = spark.read.textFile(rulesPath)

      // access.log: field 1 holds the client IP; convert the dotted-quad
      // string to its decimal (Long) form so it can be range-compared.
      val accDF: DataFrame = accDS.map { line =>
        val fields: Array[String] = line.split("[|]")
        IpTransform.IpToLong(fields(1))
      }.toDF("longIp")

      // ip.txt: fields 2/3 are the numeric start/end of a province's IP
      // range (stored as text), field 6 is the province name.
      // Parsing the bounds to Long makes the BETWEEN below an explicit
      // numeric comparison instead of relying on Spark's implicit
      // string coercion (which is fragile and type-dependent).
      val ipDF: DataFrame = ipDS.map { line =>
        val fields: Array[String] = line.split("[|]")
        (fields(2).toLong, fields(3).toLong, fields(6))
      }.toDF("startIp", "endIp", "province")

      // Register temp views for the SQL query.
      accDF.createOrReplaceTempView("v_acc")   // page-visit IPs (decimal)
      ipDF.createOrReplaceTempView("v_ip")     // province IP ranges

      // Per-province page-view counts. NOTE(review): a non-equi range join
      // like this cannot use a hash join and degenerates to a broadcast
      // nested-loop join — acceptable for a demo-sized dataset.
      spark.sql(
        """select province, count(1) as cnt
          |from v_acc join v_ip
          |on longIp between startIp and endIp
          |group by province""".stripMargin
      ).show()
    } finally {
      // Release Spark resources even if the job fails mid-way.
      spark.close()
    }
  }
}
