package io.wen.bd.s6m3.spark

import java.util.Properties
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.apache.spark.SparkConf
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object LogStreamingApp {

  /**
   * Entry point: consumes raw log lines from the `srcSample` Kafka topic,
   * strips the per-field delimiters, and republishes each cleaned line to
   * the `destSample` topic.
   *
   * Flow: Kafka direct stream -> per-partition transform -> broadcast Kafka
   * producer (one producer instance per executor JVM instead of one per task).
   */
  def main(args: Array[String]): Unit = {
    // local[*] is for development; override the master via spark-submit in production.
    val conf = new SparkConf().setAppName(this.getClass.getCanonicalName).setMaster("local[*]")
    val ssc = new StreamingContext(conf, Seconds(5))

    val srcTopic = "srcSample"
    val destTopic = "destSample"
    val kafkaConsumerConfig: Map[String, String] = Map(
      "bootstrap.servers" -> "node1:9092",
      "auto.offset.reset" -> "earliest",
      // Kafka defaults enable.auto.commit to true, which can commit offsets
      // before a micro-batch has actually been processed. Spark's Kafka
      // integration guide recommends disabling it for the direct stream.
      "enable.auto.commit" -> "false",
      "key.deserializer" -> classOf[StringDeserializer].getName,
      "value.deserializer" -> classOf[StringDeserializer].getName,
      "group.id" -> "sample-group"
    )

    // Feed test data with:
    // kafka-console-producer.sh --broker-list node1:9092 --topic srcSample < sample.log
    val srcDStream = KafkaUtils.createDirectStream(
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Array(srcTopic), kafkaConsumerConfig)
    )

    // Broadcast the producer wrapper so every executor reuses a single
    // KafkaProducer rather than constructing one per partition per batch.
    val kafkaProducer: Broadcast[SparkKafkaProducer[String, String]] = {
      val kafkaProducerConfig = {
        val config = new Properties()
        config.setProperty("bootstrap.servers", "node1:9092")
        config.setProperty("key.serializer", classOf[StringSerializer].getName)
        config.setProperty("value.serializer", classOf[StringSerializer].getName)
        // acks=all: wait for the full in-sync replica set to acknowledge.
        config.setProperty("acks", "all")
        config
      }
      ssc.sparkContext.broadcast(SparkKafkaProducer[String, String](kafkaProducerConfig))
    }

    srcDStream.foreachRDD { rdd =>
      if (!rdd.isEmpty()) {
        // Clean/transform each partition's records and forward them downstream.
        rdd.foreachPartition { partition =>
          val pendingSends = partition
            .map(_.value())
            .map { line =>
              // Each comma-separated field carries a 7-character wrapper on
              // both sides; strip it and re-join the 15 fields with '|'.
              // NOTE(review): assumes every field is >= 14 chars and every
              // line has >= 15 fields — shorter input throws, as before.
              val fields = line.split(",").map(f => f.substring(7, f.length - 7))
              (0 to 14).map(fields(_)).mkString("|")
            }
            .map(record => kafkaProducer.value.send(destTopic, record))
            // Materialize eagerly so ALL sends are dispatched (and can be
            // batched by the producer) before we block on any acknowledgement.
            // The original `.toStream` evaluated one element at a time inside
            // foreach, forcing a send -> block -> send -> block pattern that
            // defeated the producer's async batching.
            .toVector
          // Block until every record in this partition is acknowledged, so a
          // failed send fails the task instead of being silently dropped.
          pendingSends.foreach(_.get())
        }
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }
}
