package com.central.monitor.location

import java.util.Properties

import com.alibaba.fastjson.JSON
import com.central.face.streaming.TransData
import com.central.monitor.redis.MonitorRedisMethod
import com.central.test.KafkaSink
import com.central.utils.{ConfigUtil, ParseTime}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.{Dataset, Row, SparkSession}
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, ConsumerStrategies, HasOffsetRanges, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

// NOTE(review): the object name misspells "Monitor" as "Monintor"; kept as-is
// because renaming would break spark-submit / scheduler references to this class.
object LocationMonintorStreaming {

  /**
   * Streaming job entry point.
   *
   * Consumes location events (JSON strings) from a Kafka topic every 10 seconds,
   * matches each event's "location" code and "device_code" against warning rules
   * loaded from Redis, enriches matching events in place with the rule's
   * monitorId / type and a processing timestamp, and publishes them to an
   * outbound Kafka topic. Runs until the streaming context terminates.
   */
  def main(args: Array[String]): Unit = {

    // --- external configuration -------------------------------------------
    val configs = ConfigUtil.getProperties()
    val KAFKA_SERVERS = configs.getProperty("bootstrap.servers")
    val KAFKA_GROUPID = configs.getProperty("group.id")
    val OFFSET = configs.getProperty("auto.offset.reset")
    val LOCATION_TOPIC = configs.getProperty("location.topic")
    val mysql_username = configs.getProperty("mysql.username")
    val mysql_password = configs.getProperty("mysql.password")
    val elasticsearch_host = configs.getProperty("elasticsearch.host", "192.168.5.180")
    val elasticsearch_port = configs.getProperty("elasticsearch.port", "9200")
    val KAFKA_OUT_TOPIC = configs.getProperty("send.location.warning.topic")
    val redisKey = configs.getProperty("redis.location.warning.key")

    // App name fixed to describe this job (was copy-pasted "FaceStreaming").
    // TODO(review): master is hard-coded to local[8]; externalize before cluster use.
    val conf = new SparkConf().setAppName("LocationMonitorStreaming").setMaster("local[8]")
      .set("es.nodes", elasticsearch_host)
      .set("es.port", elasticsearch_port)
      .set("es.mapping.date.rich", "false")

    val sc = new SparkContext(conf)
    sc.setLogLevel("ERROR")
    val ssc = new StreamingContext(sc, Seconds(10))
    val session = SparkSession.builder().config(conf).getOrCreate()

    // NOTE(review): `prop` and `deviceParam` are broadcast but never referenced
    // in this job. Kept because TransData.transFaceData(session) may have side
    // effects — confirm and delete if truly dead.
    val properties = new Properties()
    properties.put("user", mysql_username)
    properties.put("password", mysql_password)
    val prop = sc.broadcast(properties)
    val deviceParam: Broadcast[Dataset[Row]] = ssc.sparkContext.broadcast(TransData.transFaceData(session))

    // Broadcast a lazily-initialized producer wrapper so each executor reuses
    // one Kafka producer instead of creating one per record/partition.
    val producer: Broadcast[KafkaSink[String, String]] = {
      val kafkaProducerConfig = {
        val p = new Properties()
        p.setProperty("bootstrap.servers", KAFKA_SERVERS)
        p.setProperty("key.serializer", classOf[StringSerializer].getName)
        p.setProperty("value.serializer", classOf[StringSerializer].getName)
        p
      }
      ssc.sparkContext.broadcast(KafkaSink(kafkaProducerConfig))
    }

    val kafkaparam = Map(
      "bootstrap.servers" -> KAFKA_SERVERS,
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> KAFKA_GROUPID,
      "auto.offset.reset" -> OFFSET,
      // Auto-commit is disabled: offsets are committed manually after each
      // batch (commitAsync below) so progress is only recorded once the
      // batch has actually been processed.
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )
    val topics = Array(LOCATION_TOPIC)

    val stream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream(
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](topics, kafkaparam)
    )

    // foreachRDD must run on the direct stream (not a mapped one): only the
    // direct Kafka RDD implements HasOffsetRanges, and offsets must be read
    // before any transformation.
    stream.foreachRDD { rdd =>
      val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

      // Re-load the warning rules from Redis once per batch on the driver;
      // the resulting arrays are serialized into the executor closures below.
      val redisData: Array[String] = MonitorRedisMethod.loadNumFromRedis(redisKey)
      val redisCodes: Array[String] =
        redisData.map(jsonString => JSON.parseObject(jsonString).getString("code"))
      redisData.foreach(println)

      rdd.map(_.value()).foreachPartition { partitionRecords =>
        partitionRecords
          .map(JSON.parseObject)
          .filter { event =>
            // An event matches when its "location" code is covered by a rule
            // AND its device_code appears in that rule's comma-separated
            // device list. On a match the event is enriched in place.
            val code = Option(event.getString("location")).getOrElse("")
            // NOTE(review): a missing device_code yields the string "null"
            // here (getInteger returns null) — preserved from the original.
            val deviceCode = event.getInteger("device_code") + ""

            val index = redisCodes.indexOf(code)
            if (index == -1) {
              false
            } else {
              val rule = JSON.parseObject(redisData(index))
              val ruleDevices = rule.getString("deviceCode").split(",")
              if (ruleDevices.contains(deviceCode)) {
                event.put("monitorId", rule.get("monitorId"))
                event.put("code_type", rule.get("type"))
                event.put("streaming_time", ParseTime.LongToDate(System.currentTimeMillis() + ""))
                true
              } else {
                false
              }
            }
          }
          .foreach { event =>
            producer.value.send(KAFKA_OUT_TOPIC, event.toJSONString)
            println(event.toJSONString)
          }
      }

      // BUG FIX: enable.auto.commit is false but the original never committed
      // offsets, so the group's position was never persisted and every restart
      // fell back to auto.offset.reset. Commit after the batch is processed.
      stream.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)
    }

    ssc.start()
    ssc.awaitTermination()
  }

}
