package cn.tecnova.analysis

import java.sql.{Connection, DriverManager, PreparedStatement, ResultSet}
import java.text.SimpleDateFormat
import java.util.regex.{Matcher, Pattern}
import java.util.{Date, UUID}

import cn.tecnova.bean.{BaAnalysisBean, BaArticle, EventRelationArticle, NlpJsonBean}
import cn.tecnova.utils.{BroadcastKafkaProducer, BroadcastTools, ConfigHandler}
import com.alibaba.fastjson.JSON
import com.google.gson.Gson
import org.apache.commons.lang.StringUtils
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, broadcast}
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.util.control.Breaks

/**
  * Public-sentiment event related-article analysis (V4).
  *
  * Consumes NLP-processed articles from Kafka, matches each article against the
  * currently active sentiment events in the database (each event carries
  * space-separated keyword groups; words within a group are comma-separated),
  * highlights every hit keyword in the article body with an HTML span, and
  * publishes the match result back to Kafka.
  */
object EventRelationArticleV4 {

  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
    * Entry point.
    *
    * @param args args(0) = spark.streaming.kafka.maxRatePerPartition,
    *             args(1) = batch interval in seconds.
    */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      //      .setMaster("local[*]")
      .set("spark.streaming.kafka.maxRatePerPartition", args(0))
      .set("spark.streaming.stopGracefullyOnShutdown", "true")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .registerKryoClasses(Array(classOf[NlpJsonBean]))

    val ssc = new StreamingContext(conf, Seconds(args(1).toInt))

    // Fetch the media-type dictionary once on the driver and broadcast it to executors.
    val mediaMap: Map[String, String] = BroadcastTools.getMediaDic()
    val mediaDic: broadcast.Broadcast[Map[String, String]] = ssc.sparkContext.broadcast(mediaMap)

    val groupid = "g_eventrelation"

    val data: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent, // spread Kafka partitions evenly across executors
      ConsumerStrategies.Subscribe[String, String](Array(ConfigHandler.topic), ConfigHandler.kafkaParams(groupid))
    )

    // Broadcast a lazily-initialized Kafka producer (one real producer per executor JVM).
    val kafkaBro = ssc.sparkContext.broadcast(BroadcastKafkaProducer[String, String](ConfigHandler.kafkaProps))

    data.foreachRDD(rdd => {

      // Capture this batch's offsets up front; committed only after the batch is processed.
      val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

      // Skip all work (including the JDBC round-trips) for empty batches.
      if (!rdd.isEmpty()) {

        // Deserialize each Kafka record's JSON payload into an NlpJsonBean.
        val jsRDD: RDD[NlpJsonBean] = rdd.map(re => {
          JSON.parseObject(re.value(), classOf[NlpJsonBean])
        })

        jsRDD.foreachPartition(iter => {

          var conn: Connection = null
          var sentimentEvent: PreparedStatement = null

          val gson = new Gson()
          // SimpleDateFormat is not thread-safe, but this instance is partition-local.
          val timeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")

          try {

            conn = DriverManager.getConnection(ConfigHandler.url, ConfigHandler.user, ConfigHandler.passwd)

            // FIX: prepare the statement ONCE per partition. The original re-created it
            // inside the per-record loop, overwriting the var and leaking every
            // PreparedStatement except the last (only the last was closed in finally).
            sentimentEvent = conn.prepareStatement(
              """
              select
              id,user_id,article_id,article_title,keywords
              from public_sentiment_event
              where run_flag = 1 and del_flag = 0 and start_time <= ? and end_time >= ?
            """.stripMargin)

            iter.foreach(js => {

              // Query the events active right now (start_time <= now <= end_time).
              val currentTime = timeFormat.format(new Date)
              sentimentEvent.setString(1, currentTime)
              sentimentEvent.setString(2, currentTime)
              val sentimentEventSet: ResultSet = sentimentEvent.executeQuery()

              // Media type of the article's source site.
              val mediaCls: String = js.media_cls
              // Mutable on purpose: hit keywords are wrapped with highlight tags in place,
              // and later events see the already-highlighted content.
              var articleContent: String = js.article_content

              try {
                // One row per currently active sentiment event.
                while (sentimentEventSet.next()) {
                  val userId: String = sentimentEventSet.getString("user_id")
                  val eventId: String = sentimentEventSet.getString("id")
                  val eventName = sentimentEventSet.getString("article_title")
                  // Space-separated keyword groups; comma-separated words inside a group.
                  val keyWords = sentimentEventSet.getString("keywords")

                  if (StringUtils.isNotEmpty(keyWords)) {

                    val keyWordsArr: Array[String] = keyWords.split(" ")

                    val loop = new Breaks
                    var hitWords = ""

                    // Stop at the first keyword group fully contained in the article.
                    loop.breakable {
                      for (group <- keyWordsArr) {

                        val words: Array[String] = group.split(",")

                        // The group matches only when EVERY word occurs in the article.
                        if (words.forall(w => articleContent.contains(w))) {

                          // Wrap each hit keyword with a highlight tag.
                          // FIX: quote both the pattern and the replacement so keywords
                          // containing regex metacharacters ( ( ) + $ \ ... ) can no longer
                          // throw PatternSyntaxException or corrupt the replacement.
                          for (w <- words) {
                            articleContent = articleContent.replaceAll(
                              Pattern.quote(w),
                              Matcher.quoteReplacement("<span class=\"weight\">" + w + "</span>"))
                          }

                          // NOTE(review): `group` came from split(" ") so it contains no
                          // spaces — this replaceAll is a no-op, kept for behavior parity.
                          hitWords = group.replaceAll(" ", ",")

                          val id = UUID.randomUUID().toString.replaceAll("-", "")
                          val update_time = timeFormat.format(new Date)

                          val baAnalysisBean = BaAnalysisBean(id, js.site_id, js.template_source, js.uuid, js.site_name, js.site_url, js.domain, js.weight, js.main_id, js.site_cls, js.enterprise_scale_cls, js.customer_cls, mediaDic.value.getOrElse(mediaCls, mediaCls), js.content_cls, js.important_leavel, js.industry_cls, js.language_cls, js.area_cls, js.site_province, js.site_city, js.site_district, js.schedule, js.article_url, js.code, js.article_html, js.article_channel, js.article_title, js.article_author, js.article_source, js.article_pubdate, js.article_view, js.article_reply, articleContent, js.article_like, js.article_forword, js.gmt_create, js.nlp_emotion, " ", " ", " ", " ", " ", hitWords, " ", userId, " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", eventId, eventName, " ", " ", " ", " ", update_time, " ", " ", " ", " ", " ", " ", "ba_event_relation_article")

                          val value = gson.toJson(baAnalysisBean)
                          println(value)
                          kafkaBro.value.send("ba_event_relation_article", UUID.randomUUID().toString.replaceAll("-", ""), value)
                          kafkaBro.value.send("ba_article", UUID.randomUUID().toString.replaceAll("-", ""), gson.toJson(BaArticle(js.uuid, js.article_title, js.article_content, js.nlp_emotion, "ba_article")))
                          // First matching group wins for this event; move to the next event.
                          loop.break()

                        }

                      }
                    }

                  }

                }
              } finally {
                // FIX: close each per-record ResultSet (the original never closed them).
                sentimentEventSet.close()
              }

            })

          } catch {

            case e: Exception => e.printStackTrace()

          } finally {
            if (sentimentEvent != null) sentimentEvent.close()
            if (conn != null) conn.close()
          }

        })
      }

      // Commit this batch's offsets only after the batch has been processed.
      data.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)

    })

    ssc.start()
    ssc.awaitTermination()

  }

}
