package SparkShiXun

import java.text.SimpleDateFormat
import java.util.Locale

import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.parsing.json.JSON

/**
  * Created by Administrator on 2017/5/14 0014.
  *  1，需求，统计每天每个地区的pv，uv
  * 2，目标
  * ------------|--------|------|-------|
  * day         |area    | pv   |     uv|
  * ------------|--------|------|-------|
  * 2016-09-01	|上海    |687   |    100|
  * ------------|--------|------|-------|
  * 2016-09-01	|北京	   |649   |    80	|
  * ------------|--------|------|-------|
  * 2016-09-01	|天津	   |612   |    93	|
  * ------------|--------|------|-------|
  * 2016-09-02	|广州    |1249  |     18|
  * ------------|--------|------|-------|
  * 3，思路
  * step1,将ip映射表广播到spark的每一个worker上
  * 广播的内容包括以下（使用元组的方式）
  * （开始ip，结束ip，省份）  *
  * step2，2.1从kafka读取数据 （{"client_ip":"192.168.200.162","request_time":"01/Sep/2016:10:52:14 +0800","xxxxx":"xxxxx"}）
  * 2.2 关键点，将json数据转换成对象
  * 2.3 获得到json对象中需要的字段,封装成元组 （请求时间，地区）
  * 2.3.1  将内网ip转换成外网ip
  * 2.3.2  将ip转换成十进制
  * 2.3.3使用二分查找法找到对应ip（十进制），所对应的地区
  *
  * step3,将结果数据保存在redis
  * 3.1，用日期作为 key例如 ant_bi_area_pv_uv_2016-09-01 (key)  上海（field）
  *
  */
object AreaPvUvStatTask {

  def main(args: Array[String]) {
    // ---------------------------------- Spark initialization ----------------------------------
    System.setProperty("hadoop.home.dir","E://hadoop-liyadong//hadoop-2.7.1")
//    LoggerLevels.setStreamingLogLevels()

    // Kafka connection settings: ZooKeeper quorum, consumer group, topic list, threads per topic.
    val Array(zk, groupID, topics, numThread) =
      Array("192.168.52.22:2181,192.168.52.23:2181,192.168.52.24:2181", "g", "ant_log_pc_access", "1")

    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("AreaPvUvStatTask")
      .set("spark.testing.memory", "2147480000")

    val sparkContext = new SparkContext(sparkConf)
    val ssc = new StreamingContext(sparkContext, Seconds(5))

    // ---------------------------------- broadcast the IP -> area table ----------------------------------
    // Each line of ip.txt is '|'-separated; fields 2/3 are the start/end of an IP
    // range and field 6 is the area name. The whole table is collected to the
    // driver and broadcast so every executor can binary-search it locally.
    // Alternative source: hdfs://192.168.52.22:8020/ip.txt
    val ipMapRDD = sparkContext.textFile("D://ip.txt")
      .map { line =>
        val fields = line.split("\\|")
        (fields(2), fields(3), fields(6))
      }
    val ipBroadcast = sparkContext.broadcast(ipMapRDD.collect())

    // ---------------------------------- streaming business logic ----------------------------------
    val topicMap = topics.split(",").map((_, numThread.toInt)).toMap
    val dStreams = KafkaUtils.createStream(ssc, zk, groupID, topicMap)
//dStreams.print()

    // Parse each Kafka JSON record into (client_ip, day, area).
    // flatMap over Option drops records whose JSON fails to parse instead of
    // throwing a MatchError (the original match had no None case, so one bad
    // record would kill the batch).
    val clientDetailInfo = dStreams.map(_._2).flatMap { line =>
      JSON.parseFull(line) match {
        case Some(obj) =>
          val fields = obj.asInstanceOf[Map[String, Any]]

          // Intranet IP as logged, e.g. "192.168.200.162".
          val rawIp = fields("client_ip").toString

          // request_time looks like "01/Sep/2016:10:52:14 +0800".
          // HH (0-23) is required here: the original pattern used hh (1-12),
          // which mis-parses every afternoon timestamp. 'Z' parses the RFC-822
          // zone offset instead of treating "+0800" as literal text.
          val time = fields("request_time").toString
          val sdf = new SimpleDateFormat("dd/MMM/yyyy:HH:mm:ss Z", Locale.ENGLISH)
          val dt = sdf.parse(time)
          sdf.applyPattern("yyyy-MM-dd")
          val day = sdf.format(dt)

          // Map the intranet IP to its public IP, convert it to its decimal
          // value, then binary-search the broadcast table for the owning range.
          // NOTE(review): the broadcast range bounds are kept as strings here —
          // confirm IPUtil.binarySearch compares them numerically.
          val clientIp = IPUtil.parseNetIP(rawIp)
          val ipNum = IPUtil.ipToLong(clientIp)
          val index = IPUtil.binarySearch(ipBroadcast.value, ipNum)
          val area = ipBroadcast.value(index)._3

          Some((clientIp, day, area))

        case None =>
          None // malformed JSON — skip this record
      }
    }

    // ---------------------------------- persist to Redis ----------------------------------
    // Key layout: ant_area_pv_uv_<day> is a hash of "<area>_pv"/"<area>_uv"
    // counters; ant_area_uv_<day> is a hash of the distinct client IPs seen
    // that day (used to count each IP's UV contribution only once).
    val area_pv_uv = "ant_area_pv_uv_"
    val area_uv = "ant_area_uv_"
    clientDetailInfo.foreachRDD { rdd =>
      rdd.foreachPartition { it =>
        val jedisClient = RedisUtil.getJedis()
        try {
          it.foreach { info =>
            println("redis" + info.toString())
            jedisClient.hincrBy(area_pv_uv + info._2, info._3 + "_pv", 1)
            // NOTE(review): hexists + hincrBy is not atomic; concurrent
            // partitions could double-count a UV for the same IP.
            if (!jedisClient.hexists(area_uv + info._2, info._1)) {
              jedisClient.hincrBy(area_uv + info._2, info._1, 1)
              jedisClient.hincrBy(area_pv_uv + info._2, info._3 + "_uv", 1)
            }
          }
        } finally {
          // Always return the connection, even if a record throws (the
          // original leaked the client on any per-record failure).
          jedisClient.close()
        }
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }

}
