package com.atguigu.gmall.realtime.app

import java.text.SimpleDateFormat
import java.time.{LocalDate, Period}
import java.{lang, util}
import java.util.Date

import com.alibaba.fastjson.{JSON, JSONObject}
import com.atguigu.gmall.realtime.bean.{DauInfo, PageLog}
import com.atguigu.gmall.realtime.utils.{MyBeanUtils, MyEsUtils, MyKafkaUtils, MyOffsetUtils, MyRedisUtils}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import redis.clients.jedis.{Jedis, Pipeline}

import scala.collection.mutable.ListBuffer

/**
 * DAU (daily-active-user) wide-table job.
 *
 * Pipeline:
 *  1. Build the streaming environment.
 *  2. Read the last committed offsets from Redis.
 *  3. Consume page logs from Kafka (resuming from saved offsets when present).
 *  4. Capture each micro-batch's offset ranges so they can be committed only
 *     after the batch has been fully written out.
 *  5. Process the data:
 *     5.1 Parse each record into a PageLog bean.
 *     5.2 De-duplicate:
 *         - self check: keep only session-entry pages (last_page_id == null);
 *         - third-party check: a per-day Redis SET keeps one record per mid.
 *     5.3 Join the user and province dimensions cached in Redis.
 *  6. Write the widened rows to Elasticsearch, then commit the offsets.
 */
object DwdDauApp {
  def main(args: Array[String]): Unit = {
    // 0. Restore the de-dup state (today's mids) from ES into Redis.
    //    reverState() is documented to run on every start, so Redis and ES
    //    agree before the first batch is processed.
    reverState()

    // 1. Streaming environment, 5-second micro-batches.
    val conf: SparkConf = new SparkConf().setAppName("dwd_dau_app").setMaster("local[*]")
    val ssc = new StreamingContext(conf, Seconds(5))

    // 2. Offsets previously committed to Redis (may be null/empty on first run).
    val topic = "DWD_PAGE_TOPIC_1118"
    val groupId = "DWD_DAU_GROUP"
    val offsets: Map[TopicPartition, Long] = MyOffsetUtils.readOffset(topic, groupId)

    // 3. Consume from Kafka, resuming from the saved offsets when we have them.
    val kafkaDStream: InputDStream[ConsumerRecord[String, String]] =
      if (offsets != null && offsets.nonEmpty) {
        MyKafkaUtils.getKafkaDstream(ssc, topic, groupId, offsets)
      } else {
        MyKafkaUtils.getKafkaDstream(ssc, topic, groupId)
      }

    // 4. Capture the current batch's offset ranges. transform() runs on the
    //    driver for every batch, so the var is refreshed before foreachRDD
    //    commits it at the end of the same batch.
    var offsetRanges: Array[OffsetRange] = null
    val offsetDStream: DStream[ConsumerRecord[String, String]] = kafkaDStream.transform(
      rdd => {
        offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd
      }
    )

    // 5.1 Parse the JSON payload into PageLog beans
    //     (one PageLog = one page view of one user).
    val pageLogDStream: DStream[PageLog] = offsetDStream.map(
      consumerRecord => JSON.parseObject(consumerRecord.value(), classOf[PageLog])
    )
    // Cached because the stream is consumed twice (count below + filter).
    pageLogDStream.cache()
    pageLogDStream.foreachRDD(
      rdd => println("自我审查前：" + rdd.count())
    )

    // 5.2.1 Self check: a record with a non-null last_page_id is a follow-up
    //       page view inside a session, so only session entries are kept.
    val filterDStream: DStream[PageLog] = pageLogDStream.filter(
      pageLog => pageLog.last_page_id == null
    )
    filterDStream.cache()
    filterDStream.foreachRDD(
      rdd => {
        println("自我审查后 ： " + rdd.count())
        println("-----------------------------")
      }
    )

    // 5.2.2 Third-party check: de-duplicate across batches/executors with a
    //       per-day Redis SET (key "DAU:<yyyy-MM-dd>", members are mids,
    //       expired after 24h). mapPartitions is used so each partition opens
    //       a single Jedis connection instead of one per record.
    val redisFilterDStream: DStream[PageLog] = filterDStream.mapPartitions(
      pageLogIter => {
        val pageLogList: List[PageLog] = pageLogIter.toList
        println("第三方审查前:" + pageLogList.size)
        val jedis: Jedis = MyRedisUtils.getJedis()
        val pageLogs = new ListBuffer[PageLog]()
        val sdf = new SimpleDateFormat("yyyy-MM-dd")
        for (pageLog <- pageLogList) {
          // In production ts is essentially always "today".
          val mid: String = pageLog.mid
          val date: String = sdf.format(new Date(pageLog.ts))
          val dauKey: String = s"DAU:$date"
          // SADD is an atomic check-and-insert: 1 means the mid was new today,
          // so concurrent partitions cannot both keep the same mid.
          val isNew: lang.Long = jedis.sadd(dauKey, mid)
          if (isNew == 1L) {
            pageLogs.append(pageLog)
            // Refresh the daily key's 24h TTL on first insert.
            jedis.expire(dauKey, 24 * 3600)
          }
        }
        MyRedisUtils.close(jedis)
        println("第三方审查后：" + pageLogs.size)
        pageLogs.toIterator
      }
    )

    // 5.3 Dimension join: enrich every surviving PageLog with user and
    //     province attributes cached in Redis.
    val dauInfoDStream: DStream[DauInfo] = redisFilterDStream.mapPartitions(
      pageLogIter => {
        val jedis: Jedis = MyRedisUtils.getJedis()
        val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
        // Materialize eagerly: Iterator.map is lazy, so a lazily-built result
        // would hit the Jedis connection only after close() has already
        // returned it to the pool below.
        val dauInfos = new ListBuffer[DauInfo]()
        for (pageLog <- pageLogIter) {
          val dauInfo = new DauInfo()
          // 1. Copy the fields PageLog and DauInfo share.
          MyBeanUtils.copyFiled(pageLog, dauInfo)

          // 2. User dimension: gender and age (derived from birthday).
          val userId: String = pageLog.user_id
          val redisUserKey: String = s"DIM:USER_INFO:$userId"
          val userJsonObj: JSONObject = JSON.parseObject(jedis.get(redisUserKey))
          dauInfo.user_gender = userJsonObj.getString("gender")
          val birthdayLd: LocalDate = LocalDate.parse(userJsonObj.getString("birthday"))
          dauInfo.user_age = Period.between(birthdayLd, LocalDate.now()).getYears.toString

          // 3. Province dimension.
          val provinceId: String = pageLog.province_id
          val provinceRedisKey: String = s"DIM:BASE_PROVINCE:$provinceId"
          val provinceJsonObj: JSONObject = JSON.parseObject(jedis.get(provinceRedisKey))
          dauInfo.province_name = provinceJsonObj.getString("name")
          dauInfo.province_area_code = provinceJsonObj.getString("area_code")
          dauInfo.province_iso_code = provinceJsonObj.getString("iso_code")
          dauInfo.province_3166_2 = provinceJsonObj.getString("iso_3166_2")

          // 4. Split the timestamp into date and hour columns.
          val dtHr: Array[String] = sdf.format(new Date(pageLog.ts)).split(" ")
          dauInfo.dt = dtHr(0)
          dauInfo.hr = dtHr(1).split(":")(0)

          dauInfos.append(dauInfo)
        }
        MyRedisUtils.close(jedis)
        dauInfos.toIterator
      }
    )

    // 6. Sink to Elasticsearch. One index per day ("gmall_dau_info_<date>"):
    //    daily index splitting, with template/alias managed on the ES side.
    dauInfoDStream.foreachRDD(
      rdd => {
        rdd.foreachPartition(
          dauInfoIter => {
            val sdf = new SimpleDateFormat("yyyy-MM-dd")
            val sources: List[(String, DauInfo)] =
              dauInfoIter.map(dauInfo => (dauInfo.mid, dauInfo)).toList
            if (sources.nonEmpty) {
              // All records in a batch share the same day in practice, so the
              // first record's ts picks the target index.
              val dt: String = sdf.format(new Date(sources.head._2.ts))
              MyEsUtils.save(s"gmall_dau_info_$dt", sources)
            }
          }
        )
        // 7. Commit offsets only after the whole RDD has been written —
        //    write-before-commit gives at-least-once delivery.
        MyOffsetUtils.saveOffset(topic, groupId, offsetRanges)
      }
    )

    ssc.start()
    ssc.awaitTermination()
  }

  /**
   * State restoration, executed once on every start of the job.
   *
   * Elasticsearch is the source of truth: all mids already indexed today are
   * read back and overwrite the Redis de-dup set, so Redis and ES agree
   * before any new batch is processed.
   */
  def reverState() = {
    // Pull every mid indexed today from ES.
    val filed: String = "mid"
    val dt: LocalDate = LocalDate.now()
    val indexName: String = s"gmall_dau_info_${dt.toString}"
    val mids: List[String] = MyEsUtils.searchByFiled(indexName, filed)

    // Drop the stale Redis state for today...
    val jedis: Jedis = MyRedisUtils.getJedis()
    val dauRedisKey: String = s"DAU:${dt.toString}"
    jedis.del(dauRedisKey)
    // ...and rebuild it from ES, pipelined so n mids cost one round trip.
    if (mids != null && mids.nonEmpty) {
      val pipeline: Pipeline = jedis.pipelined()
      for (mid <- mids) {
        pipeline.sadd(dauRedisKey, mid)
      }
      pipeline.sync()
    }
    MyRedisUtils.close(jedis)
  }

}
