package com.haozhen.stream.homework

/**
  * @author haozhen
  * @email haozh@ync1.com
  * @date 2021/1/15  0:14
  */
object KafkaStreamingwork {

  import java.util.Properties

  import org.apache.kafka.clients.consumer.ConsumerRecord
  import org.apache.log4j.Logger

  // Shared log4j logger for this object; used by parse()'s error handler below.
  val log = Logger.getLogger(this.getClass)

  /**
    * Entry point: consumes "topic1" with a 5s micro-batch, resuming from
    * offsets stored in Redis, processes each partition via [[process]], then
    * persists the new offsets back to Redis (at-least-once semantics).
    */
  def main(args: Array[String]): Unit = {
    import org.apache.spark.SparkConf
    import org.apache.spark.streaming.kafka010.{ConsumerStrategies, HasOffsetRanges, KafkaUtils, LocationStrategies}
    import org.apache.spark.streaming.{Seconds, StreamingContext}

    // stripSuffix("$") only removes the Scala object's trailing '$' and is a
    // no-op otherwise; the previous `.init` blindly dropped the last character.
    val conf = new SparkConf()
      .setAppName(this.getClass.getCanonicalName.stripSuffix("$"))
      .setMaster("local[*]")

    val ssc = new StreamingContext(conf, Seconds(5))

    val topics  = Array("topic1")
    val groupId = "group1"
    val kafkaParams = getKafkaConsumerParameter(groupId)

    // Resume from the offsets persisted in Redis (auto-commit is disabled in
    // the consumer config, so offsets are managed entirely by this job).
    val fromOffsets = RedisUtils.getOffsetsFormRedis(topics, groupId)
    fromOffsets.foreach(offset => log.info(s"starting offset: $offset"))

    val dstream = KafkaUtils.createDirectStream(
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](topics, kafkaParams, fromOffsets))

    dstream.foreachRDD { rdd =>
      if (!rdd.isEmpty()) {
        // Must read the offset ranges from the RDD returned by the direct
        // stream itself, before any transformation loses the KafkaRDD type.
        val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        offsetRanges.foreach(range => log.info(range.toString))

        rdd.foreachPartition(process)

        // Save offsets only after the batch is fully processed: a crash
        // before this line replays the batch (at-least-once).
        RedisUtils.saveOffsetsToRedis(offsetRanges, groupId)
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }

  /**
    * Parses every record of one Kafka partition and forwards non-empty
    * results to the "topic2" output topic.
    *
    * Uses a single KafkaProducer for the whole partition instead of creating
    * one per message, and always closes it in `finally` so buffered sends are
    * flushed and sockets/IO threads are released even if parsing throws.
    *
    * @param iter records of one partition for the current micro-batch
    */
  def process(iter: Iterator[ConsumerRecord[String, String]]): Unit = {
    import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
    val producer = new KafkaProducer[String, String](getKafkaProducerParameters())
    try {
      iter.map(record => parse(record.value()))
        .filter(_.nonEmpty)
        .foreach(line => producer.send(new ProducerRecord[String, String]("topic2", line)))
    } finally {
      // close() flushes pending asynchronous sends before releasing resources
      producer.close()
    }
  }

  /**
    * Normalizes one raw message: strips the "<<<!>>>" marker and converts the
    * comma-separated payload to a pipe-separated line.
    *
    * @param text raw Kafka message value
    * @return the "|"-joined fields, or "" when the record does not contain
    *         exactly 15 fields or parsing fails
    */
  def parse(text: String): String = {
    import scala.util.control.NonFatal
    try {
      val fields = text.replace("<<<!>>>", "").split(",")
      // Expression instead of early `return`: `return` inside a try is
      // implemented by throwing NonLocalReturnControl and is an anti-pattern.
      if (fields.length == 15) fields.mkString("|") else ""
    } catch {
      // NonFatal lets fatal errors (OutOfMemoryError, InterruptedException…)
      // propagate instead of being swallowed.
      case NonFatal(e) =>
        log.error("解析出错！", e)
        ""
    }
  }

    /**
      * Builds the Kafka consumer configuration for the direct stream.
      *
      * Auto-commit is deliberately disabled: offsets are tracked manually in
      * Redis so that processing and offset storage stay consistent.
      *
      * @param groupId consumer group id
      * @return immutable map of consumer properties
      */
    def getKafkaConsumerParameter(groupId: String): Map[String, Object] = {
      import org.apache.kafka.clients.consumer.ConsumerConfig._
      import org.apache.kafka.common.serialization.StringDeserializer

      val stringDeserializer = classOf[StringDeserializer]
      Map[String, Object](
        BOOTSTRAP_SERVERS_CONFIG        -> "test1:9092",
        KEY_DESERIALIZER_CLASS_CONFIG   -> stringDeserializer,
        VALUE_DESERIALIZER_CLASS_CONFIG -> stringDeserializer,
        GROUP_ID_CONFIG                 -> groupId,
        ENABLE_AUTO_COMMIT_CONFIG       -> (false: java.lang.Boolean),
        AUTO_OFFSET_RESET_CONFIG        -> "earliest")
    }

  /**
    * Builds the producer configuration used to publish parsed records.
    *
    * @return Properties with the bootstrap servers and String serializers
    */
  def getKafkaProducerParameters(): Properties = {
    import org.apache.kafka.clients.producer.ProducerConfig
    import org.apache.kafka.common.serialization.StringSerializer

    val props = new Properties()
    Seq(
      ProducerConfig.BOOTSTRAP_SERVERS_CONFIG     -> "test1:9092",
      ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG  -> classOf[StringSerializer],
      ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG -> classOf[StringSerializer]
    ).foreach { case (key, value) => props.put(key, value) }
    props
  }

  /**
    * Sends one message to the given topic and returns the send future.
    *
    * Fixes a resource leak: the producer was never closed, leaking its network
    * I/O thread and buffers on every call; close() also flushes the
    * asynchronous send so the message is not lost when the JVM exits.
    *
    * NOTE(review): a producer per message is expensive — hot paths should
    * reuse one producer per partition (see process()).
    *
    * @param msg   message payload
    * @param topic destination topic
    */
  def sendMsg(msg: String, topic: String) = {
    import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
    val producer = new KafkaProducer[String, String](getKafkaProducerParameters())
    try producer.send(new ProducerRecord[String, String](topic, msg))
    finally producer.close() // flushes the pending send, then releases resources
  }

}
