package SparkShiXun1

import java.text.SimpleDateFormat
import java.util.Locale

import SparkShiXun.{IPUtil, RedisUtil}
import kafka.Kafka
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.parsing.json.JSON

/**
  * Created by lenovo on 2017/7/12.
  * Practice exercise: per-area PV/UV streaming statistics.
  * 2017-07-12 15:48:53
  */
object AreaPuUvStatTask1 {

  /**
    * Streaming job: consumes access-log JSON from Kafka, resolves each client IP
    * to a province via a broadcast IP-range table, and accumulates per-day,
    * per-province PV/UV counters in Redis hashes.
    */
  def main(args: Array[String]) {

    // Point Hadoop at a local installation (Windows dev environment).
    System.setProperty("hadoop.home.dir", "E://hadoop-liyadong//hadoop-2.7.1")

    //            ZK quorum                                       consumer group  topic               threads
    val Array(zk, groupID, topics, numThread) = Array("hadoop1:2181,hadoop2:2181,hadoop3:2181", "g", "ant_log_pc_access", "1")
    // Cluster-IP alternative:
    //val Array(zk,groupID,topics,numThread) = Array("192.168.52.22:2181,192.168.52.23:2181,192.168.52.24:2181","g","ant_log_pc_access","1")

    // -------------------- initialize Spark --------------------
    val sparkConf = new SparkConf().setAppName("AreaPuUvStatTask1").setMaster("local[*]").set("spark.testing.memory", "2147480000")
    val sc = new SparkContext(sparkConf)
    val ssc = new StreamingContext(sc, Seconds(5))

    // -------------------- broadcast the IP-range table --------------------
    // Sample row: 1.0.1.0|1.0.3.255|16777472|16778239|亚洲|中国|福建|福州||电信|350100|China|CN|119.306239|26.075302
    val ipRDD = sc.textFile("D://ip.txt")
    val ipMapRDD = ipRDD.map { line =>
      val fields = line.split("\\|")
      // (range start as decimal, range end as decimal, province)
      (fields(2), fields(3), fields(6))
    }
    val ipBroadcast = sc.broadcast(ipMapRDD.collect())

    // -------------------- start streaming processing --------------------
    // There may be more than one topic; each gets the same thread count.
    val topicMap = topics.split(",").map((_, numThread.toInt)).toMap
    val dStream = KafkaUtils.createStream(ssc, zk, groupID, topicMap)
    dStream.print()

    // Parse each Kafka message. Records whose JSON fails to parse are dropped
    // via flatMap/None instead of crashing the batch: the original match had
    // no `None` case, so any malformed line raised a MatchError.
    val clientDateInfo = dStream.map(_._2).flatMap { line =>
      JSON.parseFull(line) match {
        case Some(obj) =>
          // parseFull returns a JSON object as Map[String, Any]
          val clientInfoMap = obj.asInstanceOf[Map[String, Any]]
          // intranet client IP
          val rawIp = clientInfoMap("client_ip").toString
          // nginx-style timestamp, e.g. "12/Jul/2017:15:48:53 +0800".
          // Use HH (24-hour clock): the original `hh` (12-hour) mis-reads
          // hours 13-23 and can roll the parsed date across midnight.
          val time = clientInfoMap("request_time").toString
          val sdf = new SimpleDateFormat("dd/MMM/yyyy:HH:mm:ss +0800", Locale.ENGLISH)
          val dt = sdf.parse(time)
          // reuse the formatter to extract the day bucket
          sdf.applyPattern("yyyy-MM-dd")
          val day = sdf.format(dt)

          // Map the intranet IP to its public IP, convert to decimal, then
          // binary-search the broadcast range table for its province.
          val clientIp = IPUtil.parseNetIP(rawIp)
          val ipNum = IPUtil.ipToLong(clientIp)
          val ipIndex = IPUtil.binarySearch(ipBroadcast.value, ipNum)
          // NOTE(review): assumes binarySearch always yields a valid index;
          // confirm it cannot return -1 for IPs outside the table.
          val area = ipBroadcast.value(ipIndex)._3
          Some((clientIp, day, area))
        case None =>
          None // malformed JSON — skip this record
      }
    }

    // -------------------- persist counters to Redis --------------------
    val area_pv_uv = "*ant_area_pv_uv_"
    val area_uv = "*ant_area_uv_"
    clientDateInfo.foreachRDD { rdd =>
      rdd.foreachPartition { it =>
        val jedisClient = RedisUtil.getJedis()
        it.foreach { case (ip, day, area) =>
          // Every record counts toward PV.
          jedisClient.hincrBy(area_pv_uv + day, area + "_pv", 1)
          // First sighting of an IP on a given day counts toward UV.
          // The original inverted this check (incremented only when the field
          // already existed) and never wrote the per-IP marker, so the UV
          // counter could never advance.
          if (!jedisClient.hexists(area_uv + day, ip)) {
            jedisClient.hset(area_uv + day, ip, "1")
            jedisClient.hincrBy(area_pv_uv + day, area + "_uv", 1)
          }
        }
        jedisClient.close()
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }
}
