package com.zt.bigdata.template.kafka

import java.util
import java.util.concurrent.Future
import java.util.{Properties, UUID}

import com.fasterxml.jackson.databind.node.ObjectNode
import com.zt.bigdata.common.utils.JsonHelper
import com.zt.bigdata.spark.common.dto.StreamRecord
import com.zt.bigdata.spark.common.message.NotifyMessage
import org.apache.kafka.clients.consumer.{ConsumerConfig, KafkaConsumer}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord, RecordMetadata}
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.StringType
import org.apache.spark.sql.{DataFrame, Dataset, Encoders, SparkSession}

object KafkaTemplate extends Logging with Serializable {

  /**
    * Subscribe Kafka topics and wrap each message into a typed [[StreamRecord]].
    *
    * The JSON message value is parsed with the schema derived from `E`
    * (JSON fields are matched to case-class fields by name) and exposed as
    * the `body` column before the row is mapped to `StreamRecord[E]`.
    *
    * @param spark           active SparkSession
    * @param kafkaServers    Kafka bootstrap servers, comma separated
    * @param topics          topics to subscribe, comma separated
    * @param startingOffsets offsets to start from when none are committed ("latest" by default)
    * @param pollTimeoutMs   consumer poll timeout in milliseconds
    * @param numRetries      retries when fetching offsets fails
    * @param retryIntervalMs wait between offset-fetch retries in milliseconds
    * @param m               manifest used to derive the schema of `E`
    * @tparam E product type describing the JSON message body
    * @return streaming Dataset of StreamRecord[E]
    */
  def sourceKafkaStream[E <: Product](spark: SparkSession, kafkaServers: String, topics: String,
                                      startingOffsets: String = "latest",
                                      pollTimeoutMs: Long = 1024,
                                      numRetries: Int = 1,
                                      retryIntervalMs: Long = 10)(implicit m: Manifest[E]): Dataset[StreamRecord[E]] = {
    import spark.implicits._
    // Schema comes from the case class E; from_json matches JSON keys to field names.
    val bodySchema = Encoders.product[E].schema
    val raw = sourceKafkaRawStream(spark, kafkaServers, topics, startingOffsets, pollTimeoutMs, numRetries, retryIntervalMs)
    val typed = raw
      .withColumn("body", from_json(col("value").cast(StringType), bodySchema))
      .as[StreamRecord[E]]
    typed.printSchema()
    typed
  }

  /**
    * Subscribe Kafka topics and wrap each message into a [[StreamRecord]] whose
    * body is the raw message value as a plain string (no schema applied).
    *
    * @param spark           active SparkSession
    * @param kafkaServers    Kafka bootstrap servers, comma separated
    * @param topics          topics to subscribe, comma separated
    * @param startingOffsets offsets to start from when none are committed ("latest" by default)
    * @param pollTimeoutMs   consumer poll timeout in milliseconds
    * @param numRetries      retries when fetching offsets fails
    * @param retryIntervalMs wait between offset-fetch retries in milliseconds
    * @return streaming Dataset of StreamRecord[String]
    */
  def sourceKafkaStringStream(spark: SparkSession, kafkaServers: String, topics: String,
                              startingOffsets: String = "latest",
                              pollTimeoutMs: Long = 512,
                              numRetries: Int = 3,
                              retryIntervalMs: Long = 10): Dataset[StreamRecord[String]] = {
    import spark.implicits._
    val raw = sourceKafkaRawStream(spark, kafkaServers, topics, startingOffsets, pollTimeoutMs, numRetries, retryIntervalMs)
    // Cast the binary Kafka value to a string "body" column so it lines up with StreamRecord.
    val stringRecords = raw
      .withColumn("body", col("value").cast(StringType))
      .as[StreamRecord[String]]
    stringRecords.printSchema()
    stringRecords
  }

  /**
    * Subscribe Kafka topics as a raw streaming DataFrame with the standard
    * Kafka-source columns (key, value, topic, partition, offset, timestamp, ...).
    *
    * @param spark           active SparkSession
    * @param kafkaServers    Kafka bootstrap servers, comma separated
    * @param topics          topics to subscribe, comma separated
    * @param startingOffsets offsets to start from when none are committed ("latest" by default)
    * @param pollTimeoutMs   consumer poll timeout in milliseconds
    * @param numRetries      retries when fetching offsets fails
    * @param retryIntervalMs wait between offset-fetch retries in milliseconds
    * @return raw Kafka DataFrame
    */
  def sourceKafkaRawStream(spark: SparkSession, kafkaServers: String, topics: String,
                           startingOffsets: String = "latest",
                           pollTimeoutMs: Long = 1024,
                           numRetries: Int = 1,
                           retryIntervalMs: Long = 10): DataFrame = {
    // NOTE: key/value deserializers must NOT be configured for the Spark Kafka
    // source -- it always deserializes keys and values to binary itself. The
    // previous "key.deserializer"/"value.deserializer" options were unknown to
    // the source and silently ignored, so they have been removed.
    spark
      .readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", kafkaServers)
      .option("subscribe", topics)
      .option("startingOffsets", startingOffsets)
      .option("kafkaConsumer.pollTimeoutMs", pollTimeoutMs)
      .option("fetchOffset.numRetries", numRetries)
      .option("fetchOffset.retryIntervalMs", retryIntervalMs)
      // Do not fail the query when offsets are lost (e.g. topic retention kicked in).
      .option("failOnDataLoss", value = false)
      .load()
  }

  /**
    * Write a streaming Dataset back to Kafka.
    *
    * A `Dataset[String]` is wrapped into [[KafkaWriteRecord]] so its content
    * lands in the "value" column expected by the Kafka sink; any other Dataset
    * is written as-is and must already expose a "value" column.
    *
    * Bug fix: the previous `case value: Dataset[String]` pattern was erased at
    * runtime and matched EVERY Dataset, routing non-string datasets through
    * `map(KafkaWriteRecord)` and failing at runtime. The element type is now
    * detected via the schema (a Dataset[String] has a single StringType column
    * named "value").
    *
    * @param ds           streaming Dataset to write
    * @param kafkaServers Kafka bootstrap servers, comma separated
    * @param topics       destination topic
    */
  def sinkKafkaStream(ds: Dataset[_], kafkaServers: String, topics: String): Unit = {
    val fields = ds.schema.fields
    val isStringDataset =
      fields.length == 1 && fields.head.name == "value" && fields.head.dataType == StringType
    val covertDS =
      if (isStringDataset) {
        import ds.sparkSession.implicits._
        ds.asInstanceOf[Dataset[String]].map(KafkaWriteRecord)
      } else {
        ds
      }
    covertDS.writeStream
      .format("kafka")
      .option("kafka.bootstrap.servers", kafkaServers)
      .option("topic", topics)
      .start()
  }

  /**
    * Lightweight (non-Spark) Kafka client wrapper: a background subscribe loop,
    * a lazily-created shared producer, and convenience publishers for the timer
    * and notifier topics defined in [[CustomMQP]].
    *
    * @param kafkaServers Kafka bootstrap servers, comma separated
    * @param ack          producer acks setting ("1" by default)
    */
  case class CustomMQ(kafkaServers: String, ack: String = "1") {

    // Created on first publish; initialization is guarded by `this.synchronized`
    // so concurrent first calls cannot leak a second producer instance.
    private var producer: KafkaProducer[String, String] = _

    // Throws NullPointerException (kept for backward compatibility with the
    // original validation) when s is null or empty.
    private def requireNonEmpty(s: String, what: String): Unit = {
      if (s == null || s == "") {
        throw new NullPointerException(s"$what Can't Be Empty")
      }
    }

    /**
      * Start a background thread that polls the given topics forever and feeds
      * each record to `processFun(key, value)`. Offsets are committed (async)
      * only when every record of the polled batch was processed successfully.
      *
      * @param topics            topics to subscribe, comma separated
      * @param triggerIntervalMs sleep time when a poll returns no records
      * @param pollTimeoutMs     consumer poll timeout in milliseconds
      * @param processFun        per-record callback; return false to veto the commit
      */
    def subscribe(topics: String, triggerIntervalMs: Int = 100, pollTimeoutMs: Long = 512)
                 (processFun: (String, String) => Boolean) = {
      val props = new Properties()
      props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaServers)
      // Group id derived from servers+topics so re-runs of the same subscription share offsets.
      props.put(ConsumerConfig.GROUP_ID_CONFIG, (kafkaServers + "_" + topics).hashCode + "")
      props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, classOf[StringDeserializer].getName)
      props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, classOf[StringDeserializer].getName)
      val consumer = new KafkaConsumer[String, String](props)
      // Explicit JavaConverters instead of the deprecated implicit JavaConversions.
      import scala.collection.JavaConverters._
      import scala.util.control.NonFatal
      val topicList: util.Collection[String] = topics.split(",").toList.asJava
      consumer.subscribe(topicList)
      new Thread(new Runnable {
        override def run(): Unit = {
          while (true) {
            val records = consumer.poll(pollTimeoutMs)
            if (records.isEmpty) {
              Thread.sleep(triggerIntervalMs)
            } else {
              // Process EVERY record of the batch (no short-circuit) before deciding to commit.
              val results = records.asScala.map { record =>
                try {
                  processFun(record.key(), record.value())
                } catch {
                  // NonFatal so OOM / interrupts still propagate (previously caught Throwable).
                  case NonFatal(e) =>
                    log.error(s"Kafka subscribe [$topics] error , at ${record.value()}", e)
                    false
                }
              }
              if (!results.exists(_ == false)) {
                // TODO 部分成功处理 -- partial success currently re-polls the whole batch
                consumer.commitAsync()
              }
            }
          }
        }
      }).start()
    }

    /**
      * Publish a keyed message; lazily creates the shared producer on first use.
      *
      * @return future of the broker's record metadata
      */
    def publish(topic: String, key: String, message: String): Future[RecordMetadata] = {
      this.synchronized {
        if (producer == null) {
          log.info("Init Kafka Producer")
          val props = new Properties()
          props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaServers)
          props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
          props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
          props.put(ProducerConfig.ACKS_CONFIG, ack)
          producer = new KafkaProducer[String, String](props)
        }
      }
      producer.send(new ProducerRecord[String, String](topic, key, message))
    }

    /** Publish a message without a key. */
    def publish(topic: String, message: String): Future[RecordMetadata] = {
      publish(topic, null, message)
    }

    /**
      * Publish a delayed message to the timer topic.
      *
      * @param key    Kafka message key
      * @param id     unique id of the message body
      * @param value  message body
      * @param execMs delay in milliseconds (stored as an absolute epoch-ms execution time)
      * @return future of the broker's record metadata
      */
    def publishToTimer(key: String, id: String, value: String, execMs: Long = 0): Future[RecordMetadata] = {
      requireNonEmpty(key, "Key")
      requireNonEmpty(id, "ID")
      requireNonEmpty(value, "Value")
      val objNode = JsonHelper.createObjectNode()
      // NOTE: the timer payload's "key" field carries the message id, not the Kafka key.
      objNode.put("key", id)
      objNode.put("value", value)
      objNode.put("execMs", System.currentTimeMillis() + execMs)
      publish(CustomMQP.TIMER_TOPIC, key, JsonHelper.toJsonString(objNode))
    }

    /** Same as [[publishToTimer]] but with a random UUID as the message id. */
    def publishToTimerWithoutId(key: String, value: String, execMs: Long = 0): Future[RecordMetadata] = {
      this.publishToTimer(key, UUID.randomUUID().toString, value, execMs)
    }

    /**
      * Publish a notification request to the notifier topic.
      *
      * @param key         Kafka message key
      * @param receivers   receiver ids (passed through to the notifier as JSON)
      * @param templateId  notification template id
      * @param messageInfo message content
      * @param messageType delivery channel (e.g. NotifyMessage.Ding_Talk)
      * @return future of the broker's record metadata
      */
    def publishToNotifier(key: String, receivers: Set[String], templateId: String,
                          messageInfo: String, messageType: String): Future[RecordMetadata] = {
      requireNonEmpty(messageInfo, "MessageInfo")
      requireNonEmpty(templateId, "TemplateId")
      requireNonEmpty(messageType, "MessageType")
      val objNode = JsonHelper.createObjectNode()
      objNode.set("receivers", JsonHelper.toJson(receivers))
      objNode.put("templateId", templateId)
      objNode.put("messageInfo", messageInfo)
      objNode.put("messageType", messageType)
      publish(CustomMQP.NOTIFIER_TOPIC, key, JsonHelper.toJsonString(objNode))
    }

    /**
      * Publish a notifier-output message wrapping the target topic, body and
      * delivery mode (POINT_TO_POINT by default).
      */
    def publishNotifierOutput(key: String, topic: String, value: String, mode: String = CustomMQP.POINT_TO_POINT): Future[RecordMetadata] = {
      val objNode = JsonHelper.createObjectNode()
      objNode.put("topic", topic)
      objNode.put("body", value)
      objNode.put("mode", mode)
      publish(CustomMQP.NOTIFIER_OUTPUT_TOPIC, key, JsonHelper.toJsonString(objNode))
    }

    /**
      * Log an error, push a DingTalk notification about it, and build (but not
      * throw) an Exception describing the failed item.
      *
      * @return the Exception for the caller to throw
      */
    def errorMessage(mq: CustomMQ, item: AnyRef, applicationName: String, templateId: String, receivers: String, causeScene: String, e: Throwable): Exception = {
      val data = JsonHelper.toJsonString(item)
      log.error(s"$applicationName $causeScene error [$data]", e)
      // NOTE(review): receivers Set is deliberately null here -- the actual
      // receivers string travels inside the DingTalk text message.
      mq.publishToNotifier(applicationName, null,
        templateId,
        NotifyMessage.buildDingTextMessage(receivers,
          s"$applicationName $causeScene [$data] cause error is [$e]").toString,
        NotifyMessage.Ding_Talk)
      new Exception(s"$applicationName $causeScene data existed error $data")
    }

    /** Close the shared producer (if any); a later publish will re-create it. */
    def close(): Unit = this.synchronized {
      if (producer != null) {
        producer.close()
        producer = null
      }
    }
  }

  /** Well-known topic names and delivery modes used by the [[CustomMQ]] publishers. */
  object CustomMQP {
    // Delayed-message input topic consumed by publishToTimer.
    val TIMER_TOPIC = "dmp.timer.input"
    // Notification-request topic consumed by publishToNotifier.
    val NOTIFIER_TOPIC = "dmp.notifier.notice"
    // Notifier output topic consumed by publishNotifierOutput.
    val NOTIFIER_OUTPUT_TOPIC = "dmp.notifier.output.notice"
    // Delivery modes for notifier output messages.
    val POINT_TO_POINT = "POINT_TO_POINT"
    val BROADCAST = "BROADCAST"
  }

  // Single-column wrapper: mapping a Dataset[String] to this yields the "value" column the Kafka sink expects.
  case class KafkaWriteRecord(value: String)

}
