import org.apache.spark.{SparkConf, SparkContext}

object Q1_IPtoCity {
  /**
   * Counts HTTP log hits per city by resolving each request IP against a table
   * of (startIP, endIP) -> city rules, then prints (city, count) sorted by city.
   *
   * @param args optional overrides: args(0) = ip-rule file path, args(1) = http log file path.
   *             When absent, the original default paths are used (backward compatible).
   */
  def main(args: Array[String]): Unit = {
    // `.init` strips the trailing '$' from the companion object's canonical name.
    val conf = new SparkConf().setAppName(this.getClass.getCanonicalName.init).setMaster("local[*]")
    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")

    // Input paths: overridable via CLI args; defaults preserved for existing callers.
    val ipRulePath = args.lift(0).getOrElse("F:\\TrainingCourse\\P1 大数据\\大数据正式班第四阶段模块二\\spark实战应用（上）\\代码\\data\\ip.dat")
    val httpLogPath = args.lift(1).getOrElse("F:\\TrainingCourse\\P1 大数据\\大数据正式班第四阶段模块二\\spark实战应用（上）\\代码\\data\\http.log")

    val ips = sc.textFile(ipRulePath)
    val httpLogs = sc.textFile(httpLogPath)

    // ip.dat is pipe-delimited: fields(0)/fields(1) are the range start/end IPs,
    // fields(7) is the city. The rule table is small, so collect it to the driver,
    // sort by range start once, and broadcast it. A per-record binary search then
    // replaces the original O(logs x rules) cartesian join.
    // NOTE(review): assumes the IP ranges are disjoint (true for standard ip.dat);
    // overlapping ranges would have counted a log line once per matching range
    // under the old cartesian join.
    val rules: Array[(IPAddr, IPAddr, String)] = ips.map { line =>
      val fields = line.split("\\|")
      (IPAddr(fields(0)), IPAddr(fields(1)), fields(7))
    }.collect().sortBy(_._1)

    val rulesBC = sc.broadcast(rules)

    // http.log is pipe-delimited: fields(1) is the client IP.
    httpLogs
      .map(line => line.split("\\|")(1))
      .flatMap { ipStr =>
        // IPs matching no rule are dropped, exactly as the old filter did.
        findCity(rulesBC.value, IPAddr(ipStr)).map(city => (city, 1))
      }
      .reduceByKey(_ + _)
      .sortByKey() // sort output alphabetically by city
      .collect()
      .foreach(println)
    sc.stop()
  }

  /**
   * Binary search over rules sorted by range start.
   *
   * @return the city whose [start, end] range contains `ip`, if any.
   */
  private def findCity(rules: Array[(IPAddr, IPAddr, String)], ip: IPAddr): Option[String] = {
    var lo = 0
    var hi = rules.length - 1
    var found: Option[String] = None
    while (lo <= hi && found.isEmpty) {
      val mid = lo + (hi - lo) / 2
      val (start, end, city) = rules(mid)
      if (ip < start) hi = mid - 1
      else if (ip > end) lo = mid + 1
      else found = Some(city)
    }
    found
  }
}

/**
 * A dotted-decimal IPv4-style address, ordered numerically octet by octet
 * (lexicographic over the parsed integers, shorter sequence first on a tie).
 * Serializable so instances can travel inside Spark closures/broadcasts.
 */
class IPAddr(s: String) extends Ordered[IPAddr] with Serializable {
  // Parsed octets, e.g. "10.0.0.1" -> Seq(10, 0, 0, 1).
  val IP: Seq[Int] = s.split("\\.").toSeq.map(_.toInt)

  override def compare(that: IPAddr): Int = {
    // First differing octet decides; otherwise the shorter address sorts first.
    val firstDiff = IP.zip(that.IP).collectFirst {
      case (mine, theirs) if mine != theirs => mine.compare(theirs)
    }
    firstDiff.getOrElse(IP.length.compare(that.IP.length))
  }
}
/** Companion factory enabling the `IPAddr("1.2.3.4")` construction syntax. */
object IPAddr {
  def apply(s: String): IPAddr = {
    new IPAddr(s)
  }
}