package com.imooc.spark.streaming.app

import com.imooc.spark.streaming.dao.OrderSaleMySqlDao
import com.imooc.spark.streaming.entity.{OrderSaleAmount, OrderSaleVolume, UserOrderLog}
import com.imooc.spark.streaming.utils.DateUtils
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.collection.mutable.ListBuffer

/**
  * http://spark.apache.org/docs/latest/streaming-kafka-0-10-integration.html
  *
  * @description Real-time order-log processing (micro-batching) with Spark Streaming and the Kafka 0.10 integration.
  * @author yuyon26@126.com
  * @date 2018/10/27 0:19
  */
object KafkaStreamingWordCountApp {

  /**
    * Program entry point.
    *
    * Expected arguments: &lt;bootstrapServers&gt; &lt;groupId&gt; &lt;topics&gt; &lt;checkpointDirectory&gt;
    */
  def main(args: Array[String]): Unit = {

    if (args.length != 4) {
      // Usage message kept in sync with the actual object name.
      System.err.println("Usage: KafkaStreamingWordCountApp <bootstrapServers> <groupId> <topics> <checkpointDirectory>")
      System.exit(1)
    }

    val Array(bootstrapServers, groupId, topics, checkpointDirectory) = args

    // Recover the context from the checkpoint directory when one exists so the
    // job resumes from where it left off; otherwise build a fresh context.
    val ssc = StreamingContext.getOrCreate(
      checkpointDirectory,
      () => functionToCreateContext(bootstrapServers, groupId, topics, checkpointDirectory))

    ssc.sparkContext.setLogLevel("ERROR")
    ssc.start()
    ssc.awaitTermination()
  }

  /**
    * Builds the StreamingContext: subscribes to the Kafka topics, parses the raw
    * order log lines and aggregates sales revenue and order counts per
    * (day, company) pair, persisting each micro-batch via the MySQL DAO.
    *
    * @param bootstrapServers    Kafka bootstrap server list
    * @param groupId             Kafka consumer group id
    * @param topics              comma-separated list of topics to subscribe to
    * @param checkpointDirectory directory used for Spark Streaming checkpoints
    * @return a fully wired (but not yet started) StreamingContext
    */
  def functionToCreateContext(bootstrapServers: String, groupId: String, topics: String, checkpointDirectory: String): StreamingContext = {
    /**
      * Rate limiting: spark.streaming.kafka.maxRatePerPartition caps the number
      * of records fetched per Kafka partition per second.
      */
    val sparkConf = new SparkConf().setMaster("local[5]").setAppName("KafkaStreamingWordCountTest")
      .set("spark.streaming.kafka.maxRatePerPartition", "1000")
    val ssc = new StreamingContext(sparkConf, Seconds(1))
    ssc.checkpoint(checkpointDirectory)

    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> bootstrapServers,
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> groupId,
      "auto.offset.reset" -> "latest",
      // Offsets are recovered from the Spark checkpoint. Letting the Kafka
      // consumer auto-commit as well can commit offsets ahead of the data that
      // was actually processed and break at-least-once semantics on restart,
      // so auto-commit is disabled (per the Spark Kafka 0.10 integration guide).
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    val message = KafkaUtils.createDirectStream[String, String](
      ssc,
      PreferConsistent,
      // String#split already returns an Array — no .toArray needed.
      Subscribe[String, String](topics.split(","), kafkaParams)
    )

    // Sample record:
    // a6b48be5-5fee-4189-9604-7e708ba7bf28\t25.99.73.152\t2018-11-06 09:57:51\t3199\t5\t1004
    val cleanData = message.map(record => record.value()).map(line => {
      val infos = line.split("\t")
      val uid = infos(0)
      // The IP could later be resolved to a region.
      val ip = infos(1)
      val time = infos(2)
      val price = infos(3).toFloat
      val num = infos(4).toInt
      val company = infos(5)
      UserOrderLog(uid, ip, DateUtils.parse(time), price, num, company)
    })
    // Cached because two independent aggregations consume this stream below.
    cleanData.cache()

    // Sales revenue per (day, company).
    // NOTE(review): substring(0, 8) assumes DateUtils.parse yields a
    // yyyyMMdd-prefixed string — confirm against DateUtils.
    cleanData.map(data => {
      (data.time.substring(0, 8) + "_" + data.company, data.price * data.num)
    }).reduceByKey(_ + _)
      .foreachRDD(rdd => {
        rdd.foreachPartition(partition => {
          val list = new ListBuffer[OrderSaleVolume]
          partition.foreach(record => {
            val day_company = record._1.split("_")
            list.append(OrderSaleVolume(day_company(1), day_company(0), record._2))
          })
          // TODO: persist to HBase instead of MySQL.
          // Skip the round-trip to the database for empty partitions.
          if (list.nonEmpty) {
            OrderSaleMySqlDao.insertOrderSaleVolumeBatch(list)
          }
        })
      })

    // Order count per (day, company).
    cleanData.map(data => {
      (data.time.substring(0, 8) + "_" + data.company, 1)
    }).reduceByKey(_ + _)
      .foreachRDD(rdd => {
        rdd.foreachPartition(partition => {
          val list = new ListBuffer[OrderSaleAmount]
          partition.foreach(record => {
            val day_company = record._1.split("_")
            list.append(OrderSaleAmount(day_company(1), day_company(0), record._2))
          })
          // TODO: persist to HBase instead of MySQL.
          // Skip the round-trip to the database for empty partitions.
          if (list.nonEmpty) {
            OrderSaleMySqlDao.insertOrderSaleAmountBatch(list)
          }
        })
      })
    cleanData.print(3)

    ssc
  }

}
