package org.yonggan.shop.rdd.rt

import com.alibaba.fastjson.JSON
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, HasOffsetRanges, KafkaUtils, LocationStrategies}
import org.slf4j.LoggerFactory
import org.yonggan.shop.constant.ConfigurationManager
import org.yonggan.shop.domain.LogJson
import org.yonggan.shop.utils.{RTCalculateUtils, SparkUtils}

/**
  * Real-time statistics of total sales revenue.
  *
  * Requirements:
  *   1. Total sales revenue accumulated from midnight today until now (stored in Redis).
  *   2. Sales revenue accumulated per category (stored in Redis).
  *   3. Sales revenue accumulated per province, ranked from high to low (stored in Redis).
  */
object RTShopIncomeStatistics {

  // Logger for this job; used to surface malformed Kafka payloads instead of dropping them silently.
  private val LOGGER = LoggerFactory.getLogger(RTShopIncomeStatistics.getClass)


  def main(args: Array[String]): Unit = {
    import scala.util.control.NonFatal

    val conf = SparkUtils.getSparkConf("实时统计销售总金额")
    // Throttle ingestion: at most 10 records per second per Kafka partition.
    conf.set("spark.streaming.kafka.maxRatePerPartition", "10")

    // 30-second micro-batches.
    val ssc = new StreamingContext(conf, Seconds(30))
    ssc.sparkContext.setLogLevel("WARN")

    // Auto-commit is disabled so offsets are committed manually only AFTER a batch
    // has been fully processed (at-least-once delivery semantics).
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> ConfigurationManager.KFK_SERVERS,
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> ConfigurationManager.KFK_GROUPID,
      "auto.offset.reset" -> "earliest",
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    val topics = ConfigurationManager.KFK_TOPIC

    // Direct stream over the configured topic(s).
    val kfkDS: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      Subscribe[String, String](topics, kafkaParams)
    )

    kfkDS.foreachRDD { kfkRdd =>

      if (!kfkRdd.isEmpty()) {
        // Capture offset ranges BEFORE any transformation — the HasOffsetRanges cast
        // is only valid on the RDD produced directly by the direct stream.
        val offsetRanges = kfkRdd.asInstanceOf[HasOffsetRanges].offsetRanges

        // Parse each record's JSON value into a LogJson. Records that fail to parse
        // (or parse to null) are logged and dropped instead of being silently swallowed.
        // Cached because it feeds the three aggregation jobs below.
        val filteredRdd: RDD[LogJson] = kfkRdd.flatMap { cr =>
          val json = cr.value()
          try {
            // Option(...) also filters out a null result from parseObject.
            Option(JSON.parseObject(json, classOf[LogJson]))
          } catch {
            case NonFatal(e) =>
              LOGGER.warn("Dropping malformed record: {}", json, e)
              None
          }
        }.cache()

        try {
          // 1) Running total of sales revenue since midnight (Redis).
          RTCalculateUtils.calculateTotalMoney(filteredRdd)
          // 2) Revenue accumulated per category (Redis).
          RTCalculateUtils.calculateCategoryTotalMoney(filteredRdd)
          // 3) Revenue per province, ranked from high to low (Redis).
          RTCalculateUtils.calculateProvinceTotalMoney(filteredRdd)
        } finally {
          // Release the cached batch so executors don't accumulate stale blocks.
          filteredRdd.unpersist()
        }

        // Commit offsets only after the batch was processed successfully; if any
        // calculation above threw, the exception propagates and offsets stay uncommitted.
        kfkDS.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)
      }
    }

    ssc.start()
    ssc.awaitTermination()

  }

}
