package com.mjf.gmall.realtime.app

import java.lang
import java.text.SimpleDateFormat
import java.util.Date

import scala.collection.mutable.ListBuffer
import com.alibaba.fastjson.{JSON, JSONObject}
import com.mjf.gmall.realtime.bean.DauInfo
import com.mjf.gmall.realtime.util.{MyEsUtil, MyKafkaUtil, OffsetManager, RedisUtil}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import redis.clients.jedis.Jedis

/**
 * Daily Active Users (DAU) streaming job.
 *
 * Pipeline: Kafka (startup log topic) -> Spark Streaming -> dedupe via Redis set
 * -> bulk-write to Elasticsearch; Kafka offsets are managed manually in Redis
 * so the job can resume from where it left off after a restart.
 *
 * Stack: zookeeper / kafka / redis / elasticsearch
 */
object DauApp {
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf().setMaster("local[4]").setAppName("DauApp")

    val ssc: StreamingContext = new StreamingContext(conf, Seconds(5))

    val topic = "gmall_startup"
    val groupId = "dau_group"

    // Offsets previously committed to Redis for this topic/group
    // (null or empty on the very first run).
    val kafkaOffsetMap: Map[TopicPartition, Long] = OffsetManager.getOffset(topic, groupId)

    // Consume from Kafka, resuming from the saved offsets when they exist.
    // BUG FIX: the original condition was `kafkaOffsetMap == null && kafkaOffsetMap.size > 0`,
    // which throws an NPE when the map is null and is always false otherwise —
    // so the saved offsets were never actually used and every restart re-read
    // from the default position.
    val recordInputStream: InputDStream[ConsumerRecord[String, String]] =
      if (kafkaOffsetMap != null && kafkaOffsetMap.nonEmpty) {
        // Redis already holds offset information — resume from it.
        MyKafkaUtil.getKafkaStream(topic, ssc, kafkaOffsetMap, groupId)
      } else {
        MyKafkaUtil.getKafkaStream(topic, ssc)
      }

    // Capture the offset range of each batch so it can be committed to Redis
    // after the batch is processed. Declared outside the closure (driver side)
    // and updated once per batch inside transform().
    var offsetRanges: Array[OffsetRange] = Array.empty[OffsetRange]
    val inputGetOffsetDstream: DStream[ConsumerRecord[String, String]] = recordInputStream.transform {
      rdd =>
        // Runs on the driver once per batch; records this batch's end offsets
        // for the later saveOffset() call.
        offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd
    }

    // Parse each record's value into a JSON object and enrich it with the
    // event date ("dt") and hour ("hr") derived from the "ts" field.
    val jsonObjDStream: DStream[JSONObject] = inputGetOffsetDstream.map(
      record => {
        val jsonString: String = record.value()
        val jsonObj: JSONObject = JSON.parseObject(jsonString)

        val ts: lang.Long = jsonObj.getLong("ts")
        val dateHourString: String = new SimpleDateFormat("yyyy-MM-dd HH").format(new Date(ts))
        val dateHourArr: Array[String] = dateHourString.split(" ")

        // Attach the parsed date/hour fields for downstream dedupe and storage.
        jsonObj.put("dt", dateHourArr(0))
        jsonObj.put("hr", dateHourArr(1))

        jsonObj
      }
    )

    // Dedupe strategy: keep in Redis the set of device ids (mid) already seen today.
    // Redis layout: type=set, key="dau:<yyyy-MM-dd>", member=mid, TTL=24h.
    // mapPartitions is used instead of filter so that a single Jedis connection
    // is opened per partition rather than per record.
    val filteredDStream: DStream[JSONObject] = jsonObjDStream.mapPartitions {
      jsonObjIter =>
        // An Iterator can only be traversed once; materialize it to a List.
        val jsonList: List[JSONObject] = jsonObjIter.toList
        val jedis: Jedis = RedisUtil.getJedisClient // one connection per partition
        val filteredList = new ListBuffer[JSONObject]()

        for (jsonObj <- jsonList) {
          val dt: String = jsonObj.getString("dt")
          val mid: String = jsonObj.getJSONObject("common").getString("mid")
          val dauKey = "dau:" + dt

          // SADD returns 1 only when mid was not already in the set,
          // i.e. this is the device's first visit today.
          val isNew: lang.Long = jedis.sadd(dauKey, mid)
          jedis.expire(dauKey, 3600 * 24) // expire the daily set after 24h

          if (isNew == 1L) filteredList += jsonObj
        }

        jedis.close()
        filteredList.toIterator
    }

    filteredDStream.foreachRDD {
      rdd =>
        rdd.foreachPartition {
          jsonIter => {
            val list: List[JSONObject] = jsonIter.toList
            // Convert raw JSON into the DauInfo documents to be stored,
            // keyed by mid (used as the ES document id).
            val dauList: List[(String, DauInfo)] = list.map {
              jsonObj =>
                val commonObj: JSONObject = jsonObj.getJSONObject("common")
                val dauInfo: DauInfo = DauInfo(
                  commonObj.getString("mid"),
                  commonObj.getString("uid"),
                  commonObj.getString("ar"),
                  commonObj.getString("ch"),
                  commonObj.getString("vc"),
                  jsonObj.getString("dt"),
                  jsonObj.getString("hr"),
                  "00", // minute placeholder — not parsed from the event
                  jsonObj.getLong("ts")
                )

                (dauInfo.mid, dauInfo)
            }

            val dt: String = new SimpleDateFormat("yyyy-MM-dd").format(new Date())
            // NOTE(review): index name concatenates date with no separator
            // ("gmall0105_dau_info2020-06-17") — confirm whether an underscore
            // was intended; left unchanged to avoid breaking existing indices.
            MyEsUtil.bulkDoc(dauList, "gmall0105_dau_info" + dt)
          }
        }

        // Commit offsets manually (per topic, all partitions) only after the
        // batch has been written to ES — at-least-once semantics.
        OffsetManager.saveOffset(topic, groupId, offsetRanges)

    }

    ssc.start()
    ssc.awaitTermination()

  }
}
