package com.jnpc.spark

import com.jnpc.spark.BatchProcessDataOnYARN.CRReport
import com.jnpc.spark.project.dao.FaMenGuzhangCountDAO
import com.jnpc.spark.project.domain.{FamenGuzhangCount, FamenGuzhangItems}
import com.jnpc.spark.project.utils.{FamenUtil, NLPUtil}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.collection.mutable.ListBuffer

/**
  * Receiver-based Spark Streaming job consuming CR reports from Kafka.
  *
  * Pipeline: Kafka message value -> split on '@' -> CRReport -> keep records whose
  * subject mentions "阀门" (valve) and whose date is present -> word-segment the
  * subject (NLPUtil) and classify it (FamenUtil) -> persist per-day/per-type counts
  * and item rows through FaMenGuzhangCountDAO.
  *
  * Usage: KafkaReceiverRealTime &lt;zkQuorum&gt; &lt;group&gt; &lt;topics&gt; &lt;numThreads&gt;
  */
object KafkaReceiverRealTime {

  def main(args: Array[String]): Unit = {
    // Validate CLI arguments. Must exit here: the original fell through to the
    // Array extractor below and died with a MatchError instead of showing usage.
    if (args.length != 4) {
      System.err.println("Usage: KafkaReceiverRealTime <zkQuorum> <group> <topics> <numThreads>")
      System.exit(1)
    }
    val Array(zkQuorum, group, topics, numThreads) = args

    val sparkConf = new SparkConf()
    // 5-second micro-batch interval.
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    // topic name -> number of receiver threads for that topic
    val topicMap = topics.split(",").map((_, numThreads.toInt)).toMap

    // Receiver-based Kafka stream; message values are '@'-delimited CR report rows
    // with at least 9 fields.
    val messages = KafkaUtils.createStream(ssc, zkQuorum, group, topicMap)
    val crrdd = messages.map(_._2).map(_.split("@"))
      .map(line =>
        CRReport(line(0), line(1), line(2), line(3), line(4), line(5),
          line(6), line(7), line(8))
      )
      .filter(_.cr_subject.contains("阀门"))
      .filter(!_.cr_date.equals("null"))

    // DStream#count() returns a DStream[Long]; println-ing it only printed the
    // DStream's toString once at setup. print() emits the per-batch record count.
    crrdd.count().print()

    // Attach the segmented/classified type to each report; drop unclassified ones.
    // NOTE(review): NLPUtil is constructed per record — if construction is heavy,
    // consider mapPartitions with one instance per partition (confirm NLPUtil is
    // safe to reuse). The printlns below run on executors for every record; they
    // are debug output and should be removed or demoted to logging in production.
    val finalrdd = crrdd.map { x =>
      val fc = new NLPUtil().getFenciResult(x.cr_subject)
      val fctype = FamenUtil.getType(fc)
      println("fc===" + fc)
      println("fctype===" + fctype)
      (x, fctype)
    }.filter(!_._2.equals(""))

    // Cache once, explicitly: finalrdd feeds the two output pipelines below.
    finalrdd.cache()

    // Pipeline 1: key = "yyyyMMdd_<type>", value = 1; batched per partition and
    // saved as fault counts.
    finalrdd
      .map(x => (x._1.cr_date.replace("-", "") + "_" + x._2, 1))
      .foreachRDD { rdd =>
        rdd.foreachPartition { partitionOfRecords =>
          val list = new ListBuffer[FamenGuzhangCount]
          partitionOfRecords.foreach { pair =>
            list.append(FamenGuzhangCount(pair._1, pair._2))
          }
          FaMenGuzhangCountDAO.save(list)
        }
      }

    // Pipeline 2: key = "yyyyMMdd_<type>", value = the report's id_key_cr; saved
    // as individual fault items.
    finalrdd
      .map(x => (x._1.cr_date.replace("-", "") + "_" + x._2, x._1.id_key_cr))
      .foreachRDD { rdd =>
        rdd.foreachPartition { partitionOfRecords =>
          val list = new ListBuffer[FamenGuzhangItems]
          partitionOfRecords.foreach { pair =>
            list.append(FamenGuzhangItems(pair._1, pair._2))
          }
          FaMenGuzhangCountDAO.saveItems2(list)
        }
      }

    ssc.start()
    ssc.awaitTermination()
  }
}
