package cn.lagou.spark.withKafka

import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.{Seconds, StreamingContext}

import java.util.Properties

object Worker {
  val log: Logger = Logger.getLogger(this.getClass)

  /**
   * Entry point: consumes records from Kafka topic "st1001", pipes each
   * micro-batch through [[process]] (which republishes transformed lines to
   * "st1002"), and tracks consumer offsets manually in Redis so the job can
   * resume from where it left off after a restart.
   */
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org").setLevel(Level.ERROR)
    val conf: SparkConf = new SparkConf()
      .setAppName(this.getClass.getCanonicalName)
      .setMaster("local[4]")
    val ssc = new StreamingContext(conf, Seconds(2))

    // Topic(s) to consume
    val topics: Array[String] = Array("st1001")
    val groupId = "sg1"
    // Kafka consumer parameters
    val kafkaParams: Map[String, Object] = getKafkaConsumerParameters(groupId)
    // Restore previously committed offsets from Redis (presumably empty on
    // first run so the consumer falls back to its reset policy — confirm
    // against OffsetsRedisUtils)
    val offsets: Map[TopicPartition, Long] = OffsetsRedisUtils.getOffsetsFromRedis(topics, groupId)
    println(s"redis offsets: $offsets")

    val dStream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream(
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](topics, kafkaParams, offsets)
    )

    // Transform each micro-batch and forward the result to another topic
    dStream.foreachRDD { (rdd, time) =>
      if (!rdd.isEmpty) {
        println(s"*********** rdd.count = ${rdd.count()}; time = $time ***********")
        // Capture offset ranges directly from the Kafka RDD; this must be
        // read off the original (untransformed) RDD
        val offsetRanges: Array[OffsetRange] = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        // Process the data and publish it to st1002
        rdd.foreachPartition(process)
        // Persist offsets to Redis only AFTER processing succeeds
        // => at-least-once delivery semantics
        OffsetsRedisUtils.saveOffsetsToRedis(offsetRanges, groupId)
      }
      //      else  println("RDD is empty.")
    }

    // Start the job
    ssc.start()
    // Block until termination
    ssc.awaitTermination()
  }

  /**
   * Transforms every record of one partition (whitespace runs replaced by
   * "|") and publishes the non-empty results to topic "st1002".
   *
   * FIX: the original implementation delegated each record to
   * [[sendMsg2Topic]], creating a brand-new KafkaProducer PER MESSAGE and
   * never closing it — leaking sockets/IO threads and never flushing buffered
   * records. We now create ONE producer per partition and close it in a
   * finally block.
   */
  def process(iter: Iterator[ConsumerRecord[String, String]]): Unit = {
    val producer = new KafkaProducer[String, String](getKafkaProducerParameters)
    try {
      iter
        .map(record => record.value.split("\\s+").mkString("|"))
        .filter(_.nonEmpty)
        .foreach(line => producer.send(new ProducerRecord[String, String]("st1002", line)))
      // Make sure buffered records actually reach the broker before we return
      producer.flush()
    } finally {
      producer.close()
    }
  }

  /**
   * Sends a single message to the given topic.
   *
   * Kept for API compatibility; note it builds a producer per call, so it is
   * only suitable for occasional one-off sends. The producer is now flushed
   * and closed (the original leaked it and could lose the buffered record).
   */
  def sendMsg2Topic(msg: String, topic: String): Unit = {
    val producer = new KafkaProducer[String, String](getKafkaProducerParameters)
    try {
      producer.send(new ProducerRecord[String, String](topic, msg))
      producer.flush()
    } finally {
      producer.close()
    }
  }

  /**
   * Kafka consumer configuration: manual offset management
   * (auto-commit disabled — offsets are committed to Redis by the caller).
   */
  def getKafkaConsumerParameters(groupId: String): Map[String, Object] = {
    Map[String, Object](
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> "linux121:9092",
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
      ConsumerConfig.GROUP_ID_CONFIG -> groupId,
      ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG -> (false: java.lang.Boolean),
      //        ConsumerConfig.AUTO_OFFSET_RESET_CONFIG -> "earliest"
    )
  }

  /** Kafka producer configuration (String key/value serialization). */
  def getKafkaProducerParameters: Properties = {
    val prop = new Properties()
    prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "linux121:9092")
    prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
    prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
    prop
  }

}