package cn.tecnova.analysis

import java.text.SimpleDateFormat
import java.util.{Date, UUID}

import cn.tecnova.bean._
import cn.tecnova.utils.{BroadcastKafkaProducer, ConfigHandler}
import com.alibaba.fastjson.{JSON, JSONObject}
import com.google.gson.Gson
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
  * Description: user warning store — analysis job.
  *
  * Input: business analysis stream — user public-sentiment store (ba_public_sentiment).
  * Processing: records with type=0 are forwarded to the user warning store; warning
  * method defaults to 0, warning title defaults to the article title, warning content
  * defaults to the article content, the read flag defaults to 0, and the creation time
  * defaults to the current system time.
  * Output: user warning store (ba_user_warning).
  **/
object BaUserWarningAnalysisV2 {

  // Silence noisy framework logging ("org.*" loggers: Spark, Hadoop, Kafka client).
  Logger.getLogger("org").setLevel(Level.ERROR)

  // Kafka consumer group id for this job.
  private val GroupId = "g_bauserwarning"
  // Input topic: user public-sentiment records.
  private val InputTopic = "ba_public_sentiment"
  // Output topic (also used as the record's table-name field below).
  private val OutputTopic = "ba_user_warning"

  /** Generates a 32-character hex id: a random UUID with the dashes stripped. */
  private def newId(): String = UUID.randomUUID().toString.replaceAll("-", "")

  /**
    * Job entry point.
    *
    * args(0) — spark.streaming.kafka.maxRatePerPartition (max records/sec per partition)
    * args(1) — streaming batch interval, in seconds
    *
    * Pipeline: consume ba_public_sentiment; for each record whose
    * public_sentiment_type == "0", emit a BaUserWarning to ba_user_warning with
    * warning method "0", title/content copied from the article, read flag "0",
    * and creation time set to the current system time. Offsets are committed
    * asynchronously after each batch is processed.
    */
  def main(args: Array[String]): Unit = {

    // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
    require(args.length >= 2,
      "Usage: BaUserWarningAnalysisV2 <maxRatePerPartition> <batchIntervalSeconds>")

    val conf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      //      .setMaster("local[*]")
      .set("spark.streaming.kafka.maxRatePerPartition", args(0))
      .set("spark.streaming.stopGracefullyOnShutdown", "true")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .registerKryoClasses(Array(classOf[BaAnalysisBean], classOf[BaUserWarning]))

    val ssc = new StreamingContext(conf, Seconds(args(1).toInt))

    // Direct Kafka stream over the input topic.
    val datas: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent, // spread partitions evenly across executors
      ConsumerStrategies.Subscribe[String, String](Array(InputTopic), ConfigHandler.kafkaParams(GroupId))
    )

    // Broadcast the Kafka producer wrapper so each executor reuses a single instance.
    val kafkaBro = ssc.sparkContext.broadcast(BroadcastKafkaProducer[String, String](ConfigHandler.kafkaProps))

    datas.foreachRDD(rdd => {

      // Capture this batch's offset ranges (driver side) for the commit below.
      val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

      rdd.foreachPartition(iter => {

        val gson = new Gson()
        // SimpleDateFormat is not thread-safe, but one instance per partition
        // iteration (single thread) is safe — and avoids a per-record allocation.
        val timeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")

        iter.foreach(record => {

          // Parse the Kafka message and extract the public_sentiment_type field.
          val jsonObj: JSONObject = JSON.parseObject(record.value())
          val psType = jsonObj.getString("public_sentiment_type")

          // Only records of type "0" become user warnings.
          if ("0".equals(psType)) {

            // Pull the user id and the article title/content from the message.
            val userId = jsonObj.getString("user_id")
            val articleTitle = jsonObj.getString("article_title")
            val articleContent = jsonObj.getString("article_content")

            // Build the warning record: warning method "0", title/content from
            // the article, read flag "0", creation time = now.
            val userWarning = BaUserWarning(newId(), userId, "0", articleTitle, articleContent, "0", timeFormat.format(new Date), OutputTopic)
            val value = gson.toJson(userWarning)

            // Emit to the output topic with a fresh random key.
            kafkaBro.value.send(OutputTopic, newId(), value)
          }
        })

      })

      // Commit offsets only after the batch's partitions have been processed
      // (at-least-once semantics).
      datas.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)

    })

    ssc.start()
    ssc.awaitTermination()

  }

}
