package com.lagou.homework.sparkstream

import java.util.Properties

import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, HasOffsetRanges, KafkaUtils, LocationStrategies, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}


/**
 * Spark Streaming job that consumes records from Kafka topic "topic-spark-1",
 * strips the "<<<!>>>" marker from each value, re-joins the comma-separated
 * fields with "|", and forwards the non-empty results to topic "topic-spark-2".
 *
 * Kafka auto-commit is disabled; consumer offsets are loaded from and saved to
 * Redis (via OffsetRedisUtils) so that offset persistence happens only after a
 * batch has been processed (at-least-once semantics).
 */
object SparkKafka {

    // One producer per executor JVM, created lazily on first send.
    // The previous implementation built a new KafkaProducer for EVERY record
    // and never closed it — leaking connections and IO threads, and paying the
    // full producer-bootstrap cost per message. KafkaProducer is thread-safe,
    // so a single shared instance per JVM is the recommended usage.
    private lazy val producer: KafkaProducer[String, String] = {
        val p = new KafkaProducer[String, String](getKafkaProducerParameter())
        // Flush buffered records and release resources when the JVM exits.
        sys.addShutdownHook(p.close())
        p
    }

    def main(args: Array[String]): Unit = {
//        Logger.getLogger("org").setLevel(Level.WARN)
        val conf: SparkConf = new SparkConf()
                // getCanonicalName of a Scala object's class ends in '$'; .init drops it.
                .setAppName(this.getClass.getCanonicalName.init)
                .setMaster("local[*]")
        val ssc: StreamingContext = new StreamingContext(conf, Seconds(5))
        ssc.sparkContext.setLogLevel("warn")

        val brokers = "linux121:9092"
        val topics: Array[String] = Array("topic-spark-1")
        val groupId = "group1"
        // Kafka consumer configuration (auto-commit disabled; offsets managed manually).
        val kafkaConsumerParameters: Map[String, Object] = getKafkaConsumerParameters(brokers, groupId)

        // Resume from the offsets previously saved in Redis (empty map on first run,
        // in which case Kafka's default reset policy decides where to start).
        val offsets: Map[TopicPartition, Long] = OffsetRedisUtils.getOffsetsFromRedis(topics, groupId)

        // Create the direct Kafka DStream, seeded with the recovered offsets.
        val dstream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream(
            ssc,
            LocationStrategies.PreferConsistent,
            ConsumerStrategies.Subscribe[String, String](topics, kafkaConsumerParameters, offsets)
        )

        dstream.foreachRDD { (rdd, _) =>
            if (!rdd.isEmpty()) {
                // Capture the offset ranges on the driver, BEFORE any transformation
                // (only the original KafkaRDD implements HasOffsetRanges).
                val offsetRanges: Array[OffsetRange] = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
                // Transform each partition's records and forward them to the output topic.
                rdd.foreachPartition(process)
                // Persist offsets only after the batch was processed (at-least-once).
                OffsetRedisUtils.saveOffsetsToRedis(offsetRanges, groupId)
            }
        }

        // Start the job and block until it is stopped or fails.
        ssc.start()
        ssc.awaitTermination()
    }

    /**
     * Builds the Kafka consumer configuration.
     *
     * @param brokers comma-separated bootstrap servers
     * @param groupid consumer group id
     * @return consumer config map with auto-commit disabled
     */
    def getKafkaConsumerParameters(brokers: String, groupid: String): Map[String, Object] = {
        Map[String, Object](
            ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> brokers,
            ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
            ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
            ConsumerConfig.GROUP_ID_CONFIG -> groupid,
            // Offsets are committed manually to Redis, never auto-committed to Kafka.
            ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG -> (false: java.lang.Boolean)
        )
    }

    /** Builds the Kafka producer configuration used for the output topic. */
    def getKafkaProducerParameter(): Properties = {
        val prop: Properties = new Properties()
        prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "linux121:9092,linux122:9092")
        prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
        prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
        prop
    }

    /**
     * Processes one partition of consumed records: removes the "<<<!>>>" marker,
     * re-joins the comma-separated fields with "|", drops empty results, and
     * sends each transformed line to "topic-spark-2".
     * Runs on the executors (invoked via rdd.foreachPartition).
     *
     * @param iter the records of a single Kafka partition
     */
    def process(iter: Iterator[ConsumerRecord[String, String]]): Unit = {
        iter.map { record =>
                val fields: Array[String] = record.value().replace("<<<!>>>", "").split(",")
                fields.mkString("|")
            }
            .filter(_.nonEmpty)
            .foreach(send2Topic(_, "topic-spark-2"))
    }

    /**
     * Sends a single message to the given topic using the shared per-JVM
     * producer (see `producer` above) instead of building a new producer
     * per call.
     *
     * @param str   the message payload
     * @param topic the destination topic
     */
    def send2Topic(str: String, topic: String): Unit = {
        val record: ProducerRecord[String, String] = new ProducerRecord[String, String](topic, str)
        producer.send(record)
    }
}
