package cn.tecnova.analysis

import java.sql.{Connection, DriverManager, PreparedStatement, ResultSet}
import java.util.UUID

import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal

import cn.tecnova.bean.{BaSubjectNlp, BaSubjectNlpRes}
import cn.tecnova.utils.{BroadcastKafkaProducer, ConfigHandler, JedisPools}
import com.google.gson.Gson
import org.apache.commons.lang.StringUtils
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
  * Description: reads NLP subject messages from Kafka, matches them against the
  * MySQL user table (`user`) by industry id, and writes matches back to Kafka.
  **/
object BaSubjectNlpAnalysis3 {

  // Silence Spark's verbose INFO/WARN logging.
  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
    * Entry point.
    *
    * @param args args(0) = max Kafka records pulled per partition per second
    *             (spark.streaming.kafka.maxRatePerPartition),
    *             args(1) = micro-batch interval in seconds
    */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      //      .setMaster("local[*]")
      .set("spark.streaming.kafka.maxRatePerPartition", args(0))
      .set("spark.streaming.stopGracefullyOnShutdown", "true")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .registerKryoClasses(Array(classOf[BaSubjectNlp], classOf[BaSubjectNlpRes]))

    val ssc = new StreamingContext(conf, Seconds(args(1).toInt))

    // Kafka consumer group id.
    val groupid = "g_basubjectnlpanalysis3"

    // Direct stream over the Kafka source topic.
    val datas: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent, // spread partitions evenly across executors
      ConsumerStrategies.Subscribe[String, String](Array("nlp_subject_analysis"), ConfigHandler.kafkaParams(groupid))
    )

    // Broadcast a lazily-created Kafka producer so each executor reuses one instance.
    val kafkaBro = ssc.sparkContext.broadcast(BroadcastKafkaProducer[String, String](ConfigHandler.kafkaProps))

    datas.foreachRDD(rdd => {

      // Capture this batch's offset ranges before any transformation.
      val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

      if (!rdd.isEmpty()) {

        // Parse JSON into BaSubjectNlp; one Gson per partition instead of per record.
        // Only subject_type == "5" records are processed.
        val jsRDD: RDD[BaSubjectNlp] = rdd
          .mapPartitions { records =>
            val gson = new Gson()
            records.map(r => gson.fromJson(r.value(), classOf[BaSubjectNlp]))
          }
          .filter(ba => "5".equals(ba.subject_type))

        jsRDD.foreachPartition(iter => {

          // Skip the DB round-trip entirely for empty partitions
          // (Iterator.hasNext does not consume elements).
          if (iter.hasNext) {

            var conn: Connection = null
            var ps: PreparedStatement = null
            var rs: ResultSet = null

            val gson = new Gson()
            try {

              conn = DriverManager.getConnection(ConfigHandler.url, ConfigHandler.user, ConfigHandler.passwd)

              // Load the user table ONCE per partition. The original code re-ran
              // this query for every record and leaked the previous statement.
              ps = conn.prepareStatement("select id, industry_id from user")
              rs = ps.executeQuery()

              // Materialize (userId, industryIds) pairs; users with an empty
              // industry_id can never match, so they are dropped up front.
              val users = ArrayBuffer.empty[(String, Set[String])]
              while (rs.next()) {
                val userId = rs.getString("id")
                val industryId = rs.getString("industry_id")
                if (StringUtils.isNotEmpty(industryId)) {
                  users += ((userId, industryId.split(",").toSet))
                }
              }

              iter.foreach(ba => {
                users.foreach { case (userId, industryIds) =>
                  // Emit one result per user whose industries include this subject.
                  if (industryIds.contains(ba.subject_id)) {

                    val id = UUID.randomUUID().toString.replaceAll("-", "")
                    val subjectNlpRes = BaSubjectNlpRes(id, userId, ba.subject_id, ba.subject_name, ba.subject_type, ba.sentiment_analysis, ba.hot_words, ba.positive_word_cloud, ba.negative_word_cloud, ba.relation_graph, ba.update_time, "ba_subject_nlp_analysis")
                    val json = gson.toJson(subjectNlpRes)

                    // Publish to the downstream topic with a random key.
                    kafkaBro.value.send("ba_subject_nlp_analysis", UUID.randomUUID().toString.replaceAll("-", ""), json)
                    println(json)
                  }
                }
              })

            } catch {

              // NonFatal only: let OOM / interrupts / fatal VM errors propagate.
              // NOTE(review): errors are logged and swallowed, and the batch's
              // offsets are still committed below — records in a failed partition
              // are dropped, preserving the original at-most-once behavior.
              case NonFatal(e) => e.printStackTrace()

            } finally {

              // Close in reverse order of acquisition; ResultSet was previously leaked.
              if (rs != null) rs.close()
              if (ps != null) ps.close()
              if (conn != null) conn.close()

            }

          }

        })

      }

      // Commit this batch's offsets back to Kafka.
      datas.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)

    })
    ssc.start()
    ssc.awaitTermination()

  }

}
