package com.niit.spark.sparkStreaming

import com.alibaba.fastjson.JSON
import com.niit.spark.sparkStreaming.sparkStreaming3.{Order, Result}
import com.niit.utils.MyKafkaUtils
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.json4s.DefaultFormats
import org.json4s.jackson.Json
import scala.collection.mutable.ListBuffer

object sparkStreaming2 {
  /** Parsed shape of one JSON order message read from the "orders" Kafka topic. */
  case class Order(isCorrect: String, orderCategory: String, orderData: String, orderName: String, orderNum: Int)

  /** Aggregate counts of correct/incorrect orders. Currently unused here; kept for API compatibility. */
  case class Result(correctNumber: Int, notCorrectNumber: Int)

  /**
   * Streaming job entry point.
   *
   * Reads raw JSON order records from Kafka topic "orders", parses each record
   * once (dropping unparseable or null results), counts records per `isCorrect`
   * flag in every 2-second micro-batch, and publishes each (flag, count) pair
   * as a JSON document to Kafka topic "streaming2".
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    import scala.util.Try

    // NOTE(review): the consumer group id looks like an IP address — confirm this is intentional.
    val groupId = "192.168.81.133"
    val topic = "orders"

    val conf = new SparkConf().setAppName("Spark2").setMaster("local[*]")
    val ssc = new StreamingContext(conf, Seconds(2))
    ssc.sparkContext.setLogLevel("ERROR")

    val stream = MyKafkaUtils.getKafkaDStream(ssc, topic, groupId)

    // Parse each raw message exactly once; drop records that fail to parse.
    // (The original filtered-then-mapped, parsing every record twice.)
    // parseObject may also return null without throwing, so guard against that
    // to avoid an NPE when reading `isCorrect` downstream.
    val orderDS = stream
      .map(_.value())
      .flatMap(raw => Try(JSON.parseObject(raw, classOf[Order])).toOption.filter(_ != null))

    // foreachRDD returns Unit — it is a terminal action, not a transformation,
    // so its result is deliberately not bound to a val.
    orderDS
      .map(order => (order.isCorrect, 1))
      .foreachRDD { rdd =>
        // Aggregate on the executors; the per-flag result is tiny, so collect to the driver.
        rdd.reduceByKey(_ + _).collect().foreach { case (flag, count) =>
          val payload = Map(
            "isCorrect" -> flag,
            "num" -> count
          )
          val res = Json(DefaultFormats).write(payload)
          MyKafkaUtils.send("streaming2", res)
        }
      }

    ssc.start()
    ssc.awaitTermination()
  }
}
