package com.zyx.sparkdemo.streaming

import java.util.Properties
import java.util.logging.{Level, Logger}

import com.alibaba.fastjson.JSON
import org.apache.kafka.clients.producer.ProducerConfig
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo job: reads a local error-log file, extracts the JSON payload that follows
 * the "messageCreator=" marker on each line, stamps it with the line's timestamp
 * (as field "opTime"), sorts records by the full-precision timestamp, and
 * publishes each JSON string to a Kafka topic through a broadcast [[KafkaSink]].
 *
 * @author Yaxi.Zhang
 * @since 2021/9/14 20:31
 */
object KafkaSinkDemo {

  def main(args: Array[String]): Unit = {
    // NOTE(review): java.util.logging cannot silence Spark's log4j output; if log
    // suppression is wanted, use org.apache.log4j.{Level, Logger} instead.
    // Logger.getLogger("org.apache.spark").setLevel(Level.WARNING)

    val conf = new SparkConf()
    // NOTE(review): -XX:MaxPermSize was removed in Java 8+; it is ignored there.
    conf.setExecutorEnv("SPARK_JAVA_OPTS", "-Xms2048m -Xmx2048m -XX:MaxPermSize=4096m")
    conf.setMaster("local[2]")
    conf.setAppName(s"${this.getClass.getSimpleName}")

    val sc: SparkContext = new SparkContext(conf)
    try {
      val rdd = sc.textFile("C:\\Users\\user\\Desktop\\temp\\mktquot-err.log")

      // Broadcast a lazily-initialized Kafka producer so each executor creates
      // and reuses a single producer instance instead of one per record.
      val kafkaProducer: Broadcast[KafkaSink[String, String]] = {
        val kafkaProducerConfig = {
          val p = new Properties()
          p.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "bigdata022:9092,bigdata023:9092,bigdata024:9092")
          p.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
          p.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
          // Batch up to 100ms of records before sending.
          p.setProperty(ProducerConfig.LINGER_MS_CONFIG, "100")
          p
        }
        sc.broadcast(KafkaSink[String, String](kafkaProducerConfig))
      }

      rdd
        // Collapse to one partition so the subsequent sort is a global sort.
        .repartition(1)
        // Parse each line into (sortKey, jsonPayload); malformed lines (too few
        // tokens, missing marker, unparseable JSON) are skipped instead of
        // crashing the whole job with ArrayIndexOutOfBounds/JSONException.
        .flatMap { line =>
          val tokens = line.split(" ")
          if (tokens.length < 2 || !line.contains("messageCreator=")) {
            None
          } else {
            // opTime = "<date> <time>" with fractional seconds stripped.
            val opTime = tokens(0) + " " + tokens(1).split("\\.")(0)
            // Everything after the last "messageCreator=" is the JSON payload.
            val originJson = line.split("messageCreator=").last
            scala.util.Try {
              val oriJsonObj = JSON.parseObject(originJson)
              oriJsonObj.put("opTime", opTime)
              // Full-precision timestamp is the sort key.
              (tokens(0) + " " + tokens(1), oriJsonObj.toString())
            }.toOption
          }
        }
        .sortBy(_._1)
        .map(_._2)
        // BUG FIX: the original called repartition(1) here, AFTER sortBy; that
        // extra shuffle can destroy the just-established ordering, and the RDD
        // is already a single partition anyway — removed.
        .foreach { record =>
          println("+++++++++++++++++++++++" + record)
          kafkaProducer.value.send("kafka_topic_mktinfo", record)
        }
    } finally {
      // Always release the SparkContext, even when a stage fails.
      sc.stop()
    }
  }
}
