package com.test.cn.spark.streaming.kafka

import java.util.Properties

import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.util.control.NonFatal

object KafkaWorkDemo {

  /** Streaming job: reads records from Kafka topic "lagou_demo_1", strips the
    * "<<<!>>>" marker and re-joins comma-separated fields with '|', then
    * writes the result back to Kafka topic "lagou_demo_2".
    */
  def main(args: Array[String]): Unit = {
    // Initialize the streaming context with a 5-second batch interval.
//    Logger.getLogger("org").setLevel(Level.ERROR)
    val conf = new SparkConf().setAppName("FileDStream").setMaster("local[*]")
    val ssc = new StreamingContext(conf, Seconds(5))
//    ssc.sparkContext.setLogLevel("ERROR")

    // Kafka consumer configuration.
    val groupId: String = "mygroup01"
    val topics: Array[String] = Array("lagou_demo_1")
    val kafkaParams: Map[String, Object] = getKafkaConsumerParameters(groupId)

    // Create the direct Kafka input stream.
    val dstream: InputDStream[ConsumerRecord[String, String]] =
      KafkaUtils.createDirectStream(
        ssc,
        LocationStrategies.PreferConsistent,
        ConsumerStrategies.Subscribe[String, String](topics, kafkaParams)
      )

    // Remove the "<<<!>>>" marker and turn comma-separated fields into
    // pipe-separated fields.
    val formatted: DStream[String] = dstream.map { record =>
      record.value().replaceAll("<<<!>>>", "").split(",").mkString("|")
    }

    // Publish each partition from the executors; creating the producer inside
    // saveAsKafka avoids serializing a producer from the driver.
    formatted.foreachRDD { (rdd, _) =>
      rdd.foreachPartition(saveAsKafka)
    }

    // Start the job and block until termination.
    ssc.start()
    ssc.awaitTermination()
  }

  /** Publishes every string in `iter` to Kafka topic "lagou_demo_2".
    *
    * A short-lived producer is created for the partition and is always closed
    * in a `finally` block — `close()` flushes buffered records and releases
    * the producer's network threads. (The original version never closed the
    * producer, leaking it on every partition of every batch.)
    *
    * @param iter the records of one RDD partition to publish
    */
  def saveAsKafka(iter: Iterator[String]): Unit = {
    // Producer configuration for the demo broker/topic.
    val brokers = "linux121:9092"
    val topic1 = "lagou_demo_2"
    val prop = new Properties()
    prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
    prop.put(
      ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
      classOf[StringSerializer]
    )
    prop.put(
      ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
      classOf[StringSerializer]
    )

    val producer = new KafkaProducer[String, String](prop)
    try {
      iter.foreach { str =>
        producer.send(new ProducerRecord[String, String](topic1, str))
      }
    } catch {
      // NonFatal keeps truly fatal errors (OOM, interrupts) propagating.
      case NonFatal(e) => e.printStackTrace()
    } finally {
      // Flush pending sends and free the producer's resources.
      producer.close()
    }
  }

  /** Builds the consumer configuration for the direct stream.
    *
    * Auto-commit is disabled so offsets are managed by the streaming job, and
    * `auto.offset.reset = earliest` replays the topic from the beginning when
    * the group has no committed offset.
    *
    * @param groupid consumer group id
    * @param brokers bootstrap servers; defaults to the demo broker so existing
    *                callers are unaffected
    */
  def getKafkaConsumerParameters(
      groupid: String,
      brokers: String = "linux121:9092"
  ): Map[String, Object] = {
    Map[String, Object](
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> brokers,
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> classOf[
        StringDeserializer
      ],
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> classOf[
        StringDeserializer
      ],
      ConsumerConfig.GROUP_ID_CONFIG -> groupid,
      ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG -> (false: java.lang.Boolean),
      ConsumerConfig.AUTO_OFFSET_RESET_CONFIG -> "earliest"
    )
  }
}
