package com.haozhen.homework

/**
  * @author haozhen
  * @email haozh@ync1.com
  * @date 2020/12/29  23:55
  */
object Work1 {

  /**
    * Checks whether `ip` lies in the inclusive range [`downIp`, `upIp`].
    * All three addresses are dotted-quad IPv4 strings; they are compared
    * by their numeric value as produced by [[ipTOLong]].
    *
    * @param ip     the address to test
    * @param downIp lower bound of the range (inclusive)
    * @param upIp   upper bound of the range (inclusive)
    * @return true if the address falls inside the range
    */
  def isRangeWithIp(ip:String,downIp:String,upIp:String): Boolean = {
    // Convert the target once instead of twice.
    val target = ipTOLong(ip)
    ipTOLong(downIp) <= target && target <= ipTOLong(upIp)
  }

  /**
    * Converts a dotted-quad IPv4 string (e.g. "192.168.1.1") into its numeric
    * value, weighting each octet by position: a*2^24 + b*2^16 + c*2^8 + d.
    *
    * The previous implementation stripped the dots and parsed the concatenated
    * digits ("10.2.3.4" -> 10234, "9.255.255.255" -> 9255255255), which does NOT
    * preserve IPv4 ordering and silently broke every range comparison in
    * isRangeWithIp. Positional weighting restores the correct total order.
    *
    * @param ip dotted-quad IPv4 address
    * @return the address as an unsigned 32-bit value held in a Long
    */
  def ipTOLong(ip:String): Long = {
    // Shift the accumulator one octet left and add the next octet.
    ip.split("\\.").foldLeft(0L) {
      (acc, octet) => (acc << 8) | java.lang.Long.parseLong(octet)
    }
  }

  /**
    * Entry point: enriches an HTTP access log with the geographic region of each
    * client IP (via an IP-range lookup table), writes the rewritten log lines to
    * "data/address_ip", and prints per-region access counts.
    */
  def main(args: Array[String]): Unit = {

    import org.apache.spark.{SparkConf, SparkContext}
    // Local-mode Spark context using all available cores.
    val conf = new SparkConf().setAppName("work1").setMaster("local[*]")
     val sc = new SparkContext(conf)
     sc.setLogLevel("warn")

    // Load the data sources: the access log and the IP-range lookup table.
    val httpIpsData = sc.textFile("data/http.log")
    val ipAddressData = sc.textFile("data/ip.dat")

    // IP-range records as (rangeStartIp, rangeEndIp, region).
    // NOTE(review): assumes ip.dat is '|'-delimited with dotted-quad IPs in
    // columns 0/1 and the region name in column 7 — confirm against the file.
    val ipaddr = ipAddressData.map {
      line =>
        val fields = line.split("\\|")
        (fields(0), fields(1), fields(7))
    }
    // Access-log records keyed by client IP: (ip, fullLogLine).
    // NOTE(review): assumes the client IP is the second '|'-separated field.
    val httpIp = httpIpsData.map{
      line => val fields = line.split("\\|")
        (fields(1),line)
    }

    // Distinct client IPs, to keep the cartesian product below as small as possible.
    val logIpRdd = httpIp.map(_._1).distinct()

    // Cartesian product of ranges x IPs, keeping only the pairs where the IP
    // falls inside the range, then projected to (ip, region).
    val ipaddr2 = ipaddr.cartesian(logIpRdd).filter{
      case ((downIp,upIp,_),ip) => isRangeWithIp(ip,downIp,upIp)
    }.map{
      case ((_,_,addr),ip) =>(ip,addr)
    }

    // Left outer join keeps log lines whose IP matched no range; those fall back
    // to the literal "未知地区" ("unknown region"). The IP substring in each line
    // is replaced by the resolved region name.
    val result = httpIp.leftOuterJoin(ipaddr2).map{
      case (ip,(line,opt)) =>(opt.getOrElse("未知地区"), line.replace(ip,opt.getOrElse("未知地区")))
    };
    // Persist the rewritten lines, then print an access count per region.
    // NOTE(review): println inside foreach runs on executors; output is only
    // visible on the driver console because master is local[*].
    result.map(_._2).saveAsTextFile("data/address_ip")
    result.mapValues(v=>1).reduceByKey(_+_).foreach{
      case (k,v)=>
      println(k+"\t"+v)
    }


    sc.stop()
  }

}
