package com.niit.sparkConsumer


import com.niit.sparkAnalyze.sparkRDD.EachCategoryValidNum
import com.niit.sparkAnalyze.sparkSQL.sparkSql
import com.niit.sparkAnalyze.sparkStreaming.ValidNum.processOrders
import com.niit.sparkAnalyze.sparkStreaming.orderCounts.orderCounts
import com.niit.sparkAnalyze.sparkStreaming.{ProductNum, orderNum}
import com.niit.utils._
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.log4j.{Level, Logger}
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils}


object SparkConsumer {

  /** Consumes tab-separated order records from the Kafka "orders" topic,
    * runs each analysis job on every non-empty micro-batch, and publishes
    * each result back to Kafka on its own topic.
    *
    * Expected record layout (tab-separated):
    * category \t itemName \t quantity \t date \t isValid
    * NOTE(review): malformed records (missing fields / non-numeric quantity)
    * will fail the batch at `fields(2).toInt` — confirm upstream guarantees.
    */
  def getDataFromKafka(): Unit = {
    // Silence Spark's and Akka's verbose INFO logging.
    Logger.getLogger("org").setLevel(Level.WARN)
    Logger.getLogger("akka").setLevel(Level.WARN)

    // Despite the helper's name, this is used as a StreamingContext
    // (passed to createDirectStream, started and awaited below).
    val ssc = SparkConnector.getSparkConf()
    val group = "spark_consumer111"
    val topic = "orders"
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "192.168.244.129:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> group,
      "auto.offset.reset" -> "latest",
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    val lineStream = KafkaUtils.createDirectStream(
      ssc,
      PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Array(topic), kafkaParams)
    )
    // Debug output: print each batch's raw record payloads to the driver log.
    lineStream.map(_.value()).print()

    // Parse each record into (category, itemName, quantity, date, isValid).
    val orders = lineStream.map { record =>
      val fields = record.value().split("\t")
      val category = fields(0)
      val itemName = fields(1)
      val quantity = fields(2).toInt
      val date = fields(3)
      val isValid = fields(4)
      (category, itemName, quantity, date, isValid)
    }

    orders.foreachRDD { rdd =>
      // Skip empty micro-batches entirely. The previous version ran all six
      // analysis jobs unconditionally and only guarded the Kafka writes,
      // launching a full set of Spark jobs per idle interval for nothing.
      // Results are plain local vals now: the old driver-side vars were only
      // ever assigned and read inside this closure.
      if (!rdd.isEmpty()) {
        // Valid quantity per batch (Spark Streaming).
        val validRes = processOrders(rdd)
        // Order counts aggregated by item category (Spark Streaming).
        val orderCount = orderCounts(rdd)
        // Requirement 3: valid/invalid counts per category.
        val orderNumRes = orderNum.orderNum(rdd)
        // Per-category valid count (Spark RDD implementation).
        val eachCategoryValidNum = EachCategoryValidNum.eachCategoryValidNum(rdd)
        // Spark SQL implementation.
        val sqlRes = sparkSql.sparkSql(rdd)
        // Per-product quantity.
        val productNum = ProductNum.productNum(rdd)

        // Publish each result back to Kafka as a (topic, payload) pair.
        kafkaUtil.writer("valid_num", validRes)
        kafkaUtil.writer("order_counts", orderCount)
        kafkaUtil.writer("order_num", orderNumRes)
        kafkaUtil.writer("each_category_valid_num", eachCategoryValidNum)
        kafkaUtil.writer("product_num", productNum)
        kafkaUtil.writer("sql_res", sqlRes)
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }
  // Test entry point must stay commented out.
}
