package com.huawei.order

import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object OrderConsumer {
    /** Folds a batch of new sale quantities into the running total for one key.
      *
      * Used as the state-update function for `updateStateByKey`.
      *
      * @param newValues    quantities observed for this key in the current batch
      * @param runningCount total accumulated so far (`None` the first time a key is seen)
      * @return the updated running total; always `Some`, so state is never dropped
      */
    private def updateFunction(newValues: Seq[Int], runningCount: Option[Int]): Option[Int] = {
        val newCount = newValues.sum + runningCount.getOrElse(0)
        Some(newCount)
    }

    /** Entry point: consumes order records ("product quantity") from Kafka and
      * prints both an all-time running total and a 4-second sliding-window total
      * per product, every 2 seconds.
      */
    def main(args: Array[String]): Unit = {
        val conf = new SparkConf().setAppName("OrderConsumer").setMaster("local[2]")
        val ssc = new StreamingContext(conf, Seconds(2))
        ssc.sparkContext.setLogLevel("WARN")
        // updateStateByKey requires a checkpoint directory for state recovery.
        ssc.checkpoint(ConstantUtils.CHECKPOINT_PATH)

        val kafkaParams = Map(
            ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> ConstantUtils.METADATA_BROKER_LIST_VALUE,
            ConsumerConfig.GROUP_ID_CONFIG -> ConstantUtils.GROUP_ID,
            ConsumerConfig.MAX_POLL_RECORDS_CONFIG -> ConstantUtils.MAX_POLL,
            ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
            ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer]
        )

        // Read order records from Kafka as a direct stream.
        val kafkaTopicDS = KafkaUtils.createDirectStream(
            ssc,
            LocationStrategies.PreferConsistent,
            ConsumerStrategies.Subscribe[String, String](Set(ConstantUtils.ORDER_TOPIC), kafkaParams)
        )

        // Parse each record into (product, quantity). Malformed records
        // (fewer than two fields, or a non-numeric quantity) are dropped
        // instead of throwing and failing the streaming batch — previously
        // x(1).toInt could raise ArrayIndexOutOfBounds/NumberFormatException
        // on a single bad message and kill the job.
        val pair = kafkaTopicDS
            .map(_.value())
            .flatMap { line =>
                line.split(" ") match {
                    case Array(product, qty, _*) =>
                        scala.util.Try(qty.toInt).toOption.map(product -> _)
                    case _ => None
                }
            }

        // All-time running total per product since the application started.
        pair.updateStateByKey(updateFunction).print()

        // Every 2 seconds, report per-product totals over the last 4 seconds.
        // reduceByKeyAndWindow aggregates each key across ALL batches in the
        // window; the original reduceByKey(...).window(...) only unioned the
        // per-batch partial sums, so a product could appear several times per
        // window instead of once with its windowed total.
        pair.reduceByKeyAndWindow(_ + _, Seconds(4), Seconds(2)).print()

        // Start the streaming computation and block until terminated.
        ssc.start()
        ssc.awaitTermination()
    }
}
