package com.migu.streaming

import com.migu.hbase.OnlineHBase
import com.migu.utils._
import kafka.serializer.StringDecoder
import org.apache.commons.codec.digest.DigestUtils
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka.KafkaUtils
import org.slf4j.LoggerFactory

import scala.util.control.NonFatal


/**
  * Created by maguoyun on 2017/03/22.
  */
object OnlineUserStreaming {

  val logger = LoggerFactory.getLogger(this.getClass)

  /**
    * Wires up the online-user pipeline on the given StreamingContext:
    * reads pipe-delimited CDN detail records from Kafka, counts distinct
    * viewers per (uid, channelId, md5Domain) — deduplicated by source IP —
    * and writes the per-batch counts to HBase.
    *
    * @param ssc the StreamingContext to attach the Kafka input stream to
    */
  def calculate(ssc: StreamingContext): Unit = {

    // Broadcast the HBase accessor so each executor JVM reuses one instance
    // instead of constructing it per record.
    val bcHBase = ssc.sparkContext.broadcast(OnlineHBase.apply)
//    val bcJdbc = ssc.sparkContext.broadcast(OnlineJdbc.apply)

    // Kafka cluster configuration, overridable via spark.* properties.
    val sparkConf = ConfigUtil.getConfig()
    val brokers = sparkConf.getOrElse("spark.broker", "192.168.107.88:9092")
    val topicStr = sparkConf.getOrElse("spark.topic", "cdn_detail")   // topic name
    val ouTopic = Set(topicStr)
    val groupId = sparkConf.getOrElse("spark.group.id", "test-visit-group")
    val kafkaParams = Map[String, String](
      "metadata.broker.list" -> brokers,
      "serializer.class" -> "kafka.serializer.StringEncoder",
      "group.id" -> groupId,
      "num.consumer.fetchers" -> "10"
    )

    // Input: direct (receiverless) Kafka stream; keep only the message value.
    val lines: DStream[String] =
      KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, ouTopic)
        .map(_._2)

    /**
      * Parses one pipe-delimited log line:
      *  1. parse the URL (field 3) to obtain domain, uid and channel id
      *  2. keep only records whose domain belongs to mglive.migucloud.com
      *  3. emit ((uid, channelId, sourceIp, md5Domain), 1) for IP-level dedup
      *
      * Returns None for foreign domains and for unparseable lines, so the
      * stream can drop them with flatMap instead of a null sentinel + filter.
      */
    def getTypes(line: String): Option[((String, String, String, String), Long)] = {
      try {
        val fields = line.split("\\|")                 // split once, reused for url and ip
        val urlp: Array[String] = BizUtils.httpParse(fields(3))
        val domain: String = urlp(2)
        if (domain.contains("mglive.migucloud.com")) {
          val newDomain = DigestUtils.md5Hex(domain)   // row keys carry the md5, not the raw domain
          val sourceip = fields(4)
          // ((uid, channelid, ip, md5domain), 1)
          Some(((urlp(3), urlp(4).split("\\.")(0), sourceip, newDomain), 1L))
        } else {
          None                                         // foreign domain: drop the record
        }
      } catch {
        // NonFatal: let OutOfMemoryError / InterruptedException propagate
        // instead of being silently swallowed by a catch-all.
        case NonFatal(e) =>
          logger.warn("can not parse the input streaming-----" + e.getMessage)
          None
      }
    }

    val ouRdd: DStream[((String, String, String), Int)] = lines.flatMap(getTypes)  // ((uid,channelid,ip,md5domain),1)
      .reduceByKey(_ + _)                              // collapse duplicates: one entry per distinct IP
      .map(u => ((u._1._1, u._1._2, u._1._4), 1))      // ((uid,channelid,md5domain),1)
      .reduceByKey(_ + _)                              // ((uid,channelid,md5domain), onlineUsers)

    // Output: one HBase put per aggregated key, per micro-batch.
    ouRdd.foreachRDD(rdd => {
      rdd.foreachPartition(prdd => {
        prdd.foreach(record => {
          val ouHbase = bcHBase.value
//          val ouJdbc = bcJdbc.value
          // row key layout: time_channelid_uid_md5domain
          val rk = DateUtil.getOnlineUserDateStr() + "_" + record._1._2 + "_" + record._1._1 + "_" + record._1._3
          ouHbase.insertOnlinePeople(rk, record._2.toString)
//          ouJdbc.insertOu(DateUtil.getOnlineUserDateStr(),record._1._1,record._1._2,record._2)
        })
      })
    })
  }
}
