package cn.tecnova.analysis

import java.text.SimpleDateFormat
import java.util.{Date, UUID}

import scala.util.control.NonFatal

import cn.tecnova.bean.BaAnalysisBean
import cn.tecnova.utils.{BroadcastKafkaProducer, ConfigHandler, ESUtils}
import com.alibaba.fastjson.{JSON, JSONObject}
import com.google.gson.Gson
import org.apache.commons.lang.StringUtils
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.elasticsearch.client.transport.TransportClient
import org.elasticsearch.index.query.QueryBuilders
import org.elasticsearch.search.SearchHit

/**
  * Streaming analysis job for the user-related-article library.
  *
  * Consumes JSON records from Kafka, looks each (user_id, uuid) pair up in
  * Elasticsearch, and forwards records that are not yet indexed to the
  * "ba_user_relation_article" Kafka topic tagged with that topic name.
  *
  * args(0): spark.streaming.kafka.maxRatePerPartition (records/sec/partition)
  * args(1): streaming batch interval in seconds
  **/
object BaUserRelationArticle {

  // Silence verbose Spark/Hadoop framework logging.
  Logger.getLogger("org").setLevel(Level.ERROR)

  def main(args: Array[String]): Unit = {

    // Workaround for the Netty available-processors clash between Spark and
    // the Elasticsearch transport client when both run in one JVM.
    System.setProperty("es.set.netty.runtime.available.processors", "false")

    val conf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      //      .setMaster("local[*]")
      .set("spark.streaming.kafka.maxRatePerPartition", args(0))
      .set("spark.streaming.stopGracefullyOnShutdown", "true")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .registerKryoClasses(Array(classOf[BaAnalysisBean]))

    val ssc = new StreamingContext(conf, Seconds(args(1).toInt))

    // Source topics, comma-separated in configuration.
    val topics: Array[String] = ConfigHandler.baUserRelationArticleTopic.split(",")

    // Kafka consumer group id for this job.
    val groupid = "g_bauserrelationarticleV3"

    // Direct stream from Kafka.
    val datas: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent, // spread partitions evenly across executors
      ConsumerStrategies.Subscribe[String, String](topics, ConfigHandler.kafkaParams(groupid))
    )

    // Broadcast a serializable Kafka producer wrapper to every executor.
    val kafkaBro = ssc.sparkContext.broadcast(BroadcastKafkaProducer[String, String](ConfigHandler.kafkaProps))

    datas.foreachRDD(rdd => {

      // Capture this batch's offset ranges before processing so they can be
      // committed once the batch is handled.
      val offsetRanges: Array[OffsetRange] = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

      if (!rdd.isEmpty()) {

        // Coalesce to 4 partitions so at most 4 ES clients are opened per batch.
        rdd.coalesce(4).foreachPartition(iter => {

          // One ES client per partition; released in the finally block below.
          val client = ESUtils.getEsClient()

          try {

            // Gson instance reused for every record in this partition.
            val gson = new Gson()

            iter.foreach(record => {

              val jsonObj: JSONObject = JSON.parseObject(record.value())
              // Extract the lookup keys from the incoming JSON record.
              val userId = jsonObj.getString("user_id")
              val uuid = jsonObj.getString("uuid")

              // Only query ES when both keys are present.
              if (StringUtils.isNotEmpty(userId) && StringUtils.isNotEmpty(uuid)) {

                // Search the configured index for a document matching both
                // user_id and uuid exactly (phrase match on each field).
                val search = client.prepareSearch(ConfigHandler.esIndex).setTypes(ConfigHandler.esType)
                val userIdQuery = QueryBuilders.matchPhraseQuery("user_id", userId)
                val uuidQuery = QueryBuilders.matchPhraseQuery("uuid", uuid)

                val sr = search.setQuery(QueryBuilders.boolQuery()
                  .must(userIdQuery)
                  .must(uuidQuery))
                  .execute()
                  .actionGet()
                val hits: Array[SearchHit] = sr.getHits.getHits

                // Not yet indexed: tag the record with the target topic name
                // and forward it to Kafka under a fresh dashless UUID key.
                if (hits.length == 0) {
                  println("-------------------------------")
                  println("ES中没有该数据，数据将写到kafka...")
                  val baAnalysisBean: BaAnalysisBean = gson.fromJson(record.value(), classOf[BaAnalysisBean])
                  baAnalysisBean.topic_name = "ba_user_relation_article"
                  val value: String = gson.toJson(baAnalysisBean)
                  kafkaBro.value.send("ba_user_relation_article", UUID.randomUUID().toString.replaceAll("-", ""), value)
                  println("-------------------------------")
                  println("数据写入kafka成功...")
                } else {
                  println("-------------------------------")
                  println("Es中存在该数据...")
                }

              }

            })

          } catch {
            // NonFatal lets fatal errors (OOM, interrupts, control throwables)
            // propagate instead of being swallowed here.
            // NOTE(review): offsets below are committed even when a partition
            // throws, so the failed records are NOT reprocessed — confirm this
            // at-most-once behavior is intended.
            case NonFatal(e) => e.printStackTrace()

          } finally {
            // Release the per-partition ES client.
            if (client != null) client.close()

          }

        })

      }

      // Commit this batch's offsets back to Kafka asynchronously.
      datas.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)

    })

    ssc.start()
    ssc.awaitTermination()

  }

}
