package cn.itcast.xc.analysis

import java.util.Date

import cn.itcast.xc.common.EtlEnvironment
import cn.itcast.xc.utils.DateUtils.getDateStr
import cn.itcast.xc.utils.RedisUtils
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming._
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, HasOffsetRanges, KafkaUtils, LocationStrategies}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Real-time analysis of today's course purchase count.
 *
 * Reads purchase events from Kafka with Spark Streaming, maintains a running
 * daily total in Redis, and manually tracks the consumed Kafka offsets in Redis
 * so the job can resume where it left off after a restart.
 **/
object RealTimeBuy {
  // Spark configuration from the shared ETL environment, named after this job.
  val sparkConf: SparkConf = EtlEnvironment.getSparkConf(this.getClass.getSimpleName)

  /**
   * Entry point: streams course-purchase events from Kafka every 5 seconds,
   * keeps a running "purchases today" total in Redis, and commits Kafka
   * offsets to Redis only after each batch's result has been stored.
   */
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(sparkConf)

    // Kafka connection settings.
    val bootstrapServers = "xc-online-kafka:9092"
    val groupId = "course_buy"
    val topicName = "course_buy"
    // Redis hash key holding today's purchase count.
    val rt_cv_key = "today::course_buy::current"
    // Redis hash key holding the last committed Kafka offsets.
    val rt_cv_offset = "today::course_buy::offset"

    //    1. Manually managed Kafka offsets: restore the last committed
    //       offsets from Redis (starts from 0 when none are stored).
    val redis = new RedisUtils
    redis.initRedisPool()
    val offsets = redis.getLastCommittedOffsets(rt_cv_offset, topicName, 1)

    //    2. Spark Streaming micro-batches every 5 seconds.
    val ssc = new StreamingContext(sc, Seconds(5))
    // Checkpoint directory for streaming state recovery.
    ssc.checkpoint("/user/hive/external/data_course/today_course_buy/checkpoint")
    val kafkaParam = Map[String, Object](
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> bootstrapServers,
      ConsumerConfig.GROUP_ID_CONFIG -> groupId,
      // Keys and values are consumed as plain strings.
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer]
    )
    val message = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      // Assign the exact partitions/offsets restored from Redis.
      ConsumerStrategies.Assign[String, String](offsets.keys.toList, kafkaParam, offsets)
    )

    //    3. Running total of today's purchases, seeded from Redis when a
    //       value for today's date already exists (e.g. after a restart).
    val accumulator = sc.longAccumulator("courseBuy")
    // Redis hash field for today, e.g. "course_buy_date_2024-01-01".
    val keyInfo = "course_buy_date_" + getDateStr(new Date().getTime, "yyyy-MM-dd")
    val valStr = redis.getResultRedis(rt_cv_key, keyInfo)
    if (null != valStr) {
      accumulator.add(valStr.toLong)
    }

    //    4.+5. One output operation per batch: update the count in Redis,
    //    then commit the batch's Kafka offsets. Committing only after the
    //    result is stored preserves at-least-once semantics (a failed batch
    //    is re-read instead of silently skipped). A single foreachRDD also
    //    avoids the original's double read of every batch from Kafka, and
    //    the batch size is computed with one count() instead of two jobs.
    message.foreachRDD(rdd => {
      if (!rdd.isEmpty()) {
        // Offset ranges must be captured from the direct-stream RDD itself,
        // before any transformation, or the cast to HasOffsetRanges fails.
        val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        // Each record is one purchase event, so the batch count is the
        // number of new purchases.
        val batchCount = rdd.count()
        accumulator.add(batchCount)
        redis.storeResultRedis(rt_cv_key, keyInfo, accumulator.value.toString)
        // Persist the end offset of every partition so a restart resumes here.
        offsetRanges.foreach(offsetRange => {
          // Hash field "topic_partition", value = next offset to consume.
          val topic_partition_key = offsetRange.topic + "_" + offsetRange.partition
          redis.storeOffsetRedis(rt_cv_offset, topic_partition_key, offsetRange.untilOffset)
        })
      }
    })

    // Start the streaming job and block until it is terminated.
    ssc.start()
    ssc.awaitTermination()

  }

}