package com.atguigu.gmall.realtime.app

import java.text.SimpleDateFormat
import java.time.{LocalDate, Period}
import java.util.Date

import com.alibaba.fastjson.{JSON, JSONObject}
import com.atguigu.gmall.realtime.bean.{DauInfo, PageLog}
import com.atguigu.gmall.realtime.utils.{MyBeanCopy, MyEsUtils, MyKafkaUtils, MyOffsetUtils, MyRedisUtils}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import redis.clients.jedis.{Jedis, Pipeline}

import scala.collection.mutable.ListBuffer

/**
 * DAU (daily-active-user) wide-table job.
 *
 * 1. Prepare the real-time environment
 * 2. Read offsets from Redis
 * 3. Consume data from Kafka
 * 4. Extract the offset ranges
 * 5. Process the data
 *    5.1 Convert the record structure
 *    5.2 Deduplicate
 *    5.3 Join dimension data
 * 6. Write out to Elasticsearch
 * 7. Commit the offsets
 *
 */
object DwdDauAPP {

  /**
   * Entry point: builds the DAU streaming pipeline and blocks until termination.
   *
   * Fixes over the previous revision:
   *  - Jedis connections in the dimension-join partition (and in [[revertState]])
   *    were never closed; all Redis usage is now wrapped in try/finally.
   *  - Missing dimension rows in Redis no longer NPE-kill the task; the record is
   *    emitted with null dimension fields instead.
   */
  def main(args: Array[String]): Unit = {
    // Rebuild today's dedup state in Redis from ES before consuming, so a restart
    // neither loses nor double-counts mids that were already written out.
    revertState()

    val conf: SparkConf = new SparkConf().setAppName("DwdDau").setMaster("local[4]")
    val ssc: StreamingContext = new StreamingContext(conf, Seconds(4))
    val topic: String = "DWD_PAGE_TOPIC_0212"
    val groupId: String = "DWD_DAU_GROUP"

    // 2/3. Resume from stored offsets when available; otherwise start from the
    // consumer's default position.
    val storedOffsets: Map[TopicPartition, Long] = MyOffsetUtils.ReadOffset(topic, groupId)
    val kafkaDStream: InputDStream[ConsumerRecord[String, String]] =
      if (storedOffsets != null && storedOffsets.nonEmpty)
        MyKafkaUtils.GetConsumerDStream(ssc, topic, groupId, storedOffsets)
      else
        MyKafkaUtils.GetConsumerDStream(ssc, topic, groupId)

    // 4. Capture each batch's offset ranges on the driver (transform runs there)
    //    so they can be committed after the batch has been written out.
    var offsetRanges: Array[OffsetRange] = null
    val recordDStream: DStream[ConsumerRecord[String, String]] = kafkaDStream.transform { rdd =>
      offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
      rdd
    }

    // 5.1 Deserialize the Kafka value into a PageLog bean.
    val pageLogDStream: DStream[PageLog] = recordDStream.map { record =>
      JSON.parseObject(record.value(), classOf[PageLog])
    }

    // 5.2 Dedup step 1 (self check): keep only session-entry pages, i.e. records
    //     whose last_page_id is null. The cache() + count() pairs are debug output.
    pageLogDStream.cache()
    pageLogDStream.foreachRDD { rdd =>
      println("自我审查前总数 " + rdd.count())
    }
    val selfFilteredDStream: DStream[PageLog] = pageLogDStream.filter(_.last_page_id == null)
    selfFilteredDStream.cache()
    selfFilteredDStream.foreachRDD { rdd =>
      println("自我审查后总数 " + rdd.count())
    }

    // 5.2 Dedup step 2 (third-party check): maintain today's visited mids in a
    //     Redis set (type: set, key: DAU:MID:[date], write API: sadd). A record
    //     survives only when its mid is new for the day; otherwise it is dropped.
    val redisFilteredDStream: DStream[PageLog] = selfFilteredDStream.mapPartitions { pageLogIter =>
      val pageLogs: List[PageLog] = pageLogIter.toList
      val kept: ListBuffer[PageLog] = ListBuffer[PageLog]()
      println("第三方审查前：" + pageLogs.size)
      val jedis: Jedis = MyRedisUtils.get()
      try {
        val dayFormat: SimpleDateFormat = new SimpleDateFormat("yyyy-MM-dd")
        for (pageLog <- pageLogs) {
          val dauKey: String = s"DAU:MID:${dayFormat.format(new Date(pageLog.ts))}"
          // sadd returns 1 only on first insertion -> this mid is new today.
          if (jedis.sadd(dauKey, pageLog.mid) == 1L) {
            kept.append(pageLog)
          }
        }
      } finally {
        // Always return the connection, even if Redis throws mid-partition.
        MyRedisUtils.close(jedis)
      }
      println("第三方审查后: " + kept.size)
      kept.toIterator
    }

    // 5.3 Dimension join: enrich every surviving PageLog with the user and
    //     province dimensions cached in Redis, producing the DauInfo wide row.
    val dauInfoDStream: DStream[DauInfo] = redisFilteredDStream.mapPartitions { pageLogIter =>
      val dauInfos: ListBuffer[DauInfo] = ListBuffer[DauInfo]()
      val dateTimeFormat: SimpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
      val jedis: Jedis = MyRedisUtils.get()
      try {
        for (pageLog <- pageLogIter) {
          val dauInfo: DauInfo = new DauInfo()
          // Copy the fields PageLog and DauInfo share.
          MyBeanCopy.copyFieldValue(pageLog, dauInfo)

          // User dimension: gender plus age derived from the birthday.
          // Guard against a missing dim row (jedis.get returns null) instead of NPE-ing.
          val userJson: String = jedis.get("DIM:USER_INFO:" + pageLog.user_id)
          if (userJson != null) {
            val userObj: JSONObject = JSON.parseObject(userJson)
            dauInfo.user_gender = userObj.getString("gender")
            val birthday: String = userObj.getString("birthday")
            if (birthday != null) {
              // Age = whole years between birthday and today.
              val years: Int = Period.between(LocalDate.parse(birthday), LocalDate.now()).getYears
              dauInfo.user_age = years.toString
            }
          }

          // Province dimension, same missing-row guard.
          val provinceJson: String = jedis.get("DIM:BASE_PROVINCE:" + pageLog.province_id)
          if (provinceJson != null) {
            val provinceObj: JSONObject = JSON.parseObject(provinceJson)
            dauInfo.province_name = provinceObj.getString("name")
            dauInfo.province_iso_code = provinceObj.getString("iso_code")
            dauInfo.province_3166_2 = provinceObj.getString("iso_3166_2")
            dauInfo.province_area_code = provinceObj.getString("area_code")
          }

          // Split "yyyy-MM-dd HH:mm:ss" into the date (dt) and hour (hr) columns.
          val dtHr: Array[String] = dateTimeFormat.format(new Date(pageLog.ts)).split(" ")
          dauInfo.dt = dtHr(0)
          dauInfo.hr = dtHr(1).split(":")(0)

          dauInfos.append(dauInfo)
        }
      } finally {
        // The previous revision leaked this connection on every partition.
        MyRedisUtils.close(jedis)
      }
      dauInfos.toIterator
    }

    // 6. Bulk idempotent write into a daily index (gmall_dau_info_<date>), keyed
    //    by mid so replays overwrite rather than duplicate.
    // 7. Commit offsets only after the whole batch has been written (at-least-once).
    dauInfoDStream.foreachRDD { rdd =>
      rdd.foreachPartition { dauInfoIter =>
        val docs: List[(String, DauInfo)] = dauInfoIter.map(dauInfo => (dauInfo.mid, dauInfo)).toList
        if (docs.nonEmpty) {
          // All rows in one batch share the same dt, so the head's dt names the index.
          val indexName: String = s"gmall_dau_info_${docs.head._2.dt}"
          MyEsUtils.bulkSave(indexName, docs)
        }
      }
      MyOffsetUtils.SaveOffset(topic, groupId, offsetRanges)
    }

    ssc.start()
    ssc.awaitTermination()
  }

  /**
   * State recovery on startup: clears today's dedup set in Redis and rebuilds it
   * from the mids already persisted in today's ES index, so a job restart does
   * not write duplicate DAU records.
   */
  def revertState(): Unit = {
    val today: LocalDate = LocalDate.now()
    val redisDauKey: String = s"DAU:MID:${today.toString}"
    val jedis: Jedis = MyRedisUtils.get()
    try {
      // Drop whatever partial state is left from the previous run.
      jedis.del(redisDauKey)

      // Pull every mid already written to today's index.
      val indexName: String = s"gmall_dau_info_${today.toString}"
      val fieldName: String = "mid"
      val mids: List[String] = MyEsUtils.searchField(indexName, fieldName)
      if (mids != null && mids.nonEmpty) {
        // Pipeline the sadd commands: they are buffered client-side and flushed
        // to Redis in a single round trip by sync().
        val pipeline: Pipeline = jedis.pipelined()
        for (mid <- mids) {
          pipeline.sadd(redisDauKey, mid) // buffered, not yet sent
        }
        pipeline.sync() // actually executes against Redis
      }
    } finally {
      // The previous revision leaked this connection.
      MyRedisUtils.close(jedis)
    }
  }
}
