package com.lagou.sparkstreaming
/*
  Create a streaming job that reads messages from topic1, processes them
  as required, and forwards the results to topic2.
 */
import java.util.Properties

import cn.lagou.Streaming.kafka.OffsetsWithRedisUtils
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, HasOffsetRanges, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object KafkaDStream {
  val log = Logger.getLogger(this.getClass)

  def main(args: Array[String]): Unit = {
    // Silence Spark's internal logging; application logs are unaffected.
    Logger.getLogger("org").setLevel(Level.ERROR)
    // `.init` strips the trailing '$' that Scala appends to object class names.
    val conf = new SparkConf().setAppName(this.getClass.getCanonicalName.init).setMaster("local[*]")
    val ssc = new StreamingContext(conf, Seconds(1))

    // Source topic and consumer group.
    val topics = Array("topic1")
    val groupId = "group1"
    // Kafka consumer configuration (auto-commit disabled: offsets live in Redis).
    val consumer = getKafkaConsumerParameter(groupId)
    // Resume from offsets previously checkpointed in Redis (empty on first run,
    // in which case auto.offset.reset = "earliest" applies).
    val offsets = OffsetsWithRedisUtils.getOffsetsFromRedis(topics, groupId)

    // Direct stream over topic1; PreferConsistent distributes partitions
    // evenly across the available executors.
    val dstream = KafkaUtils.createDirectStream(
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](topics, consumer, offsets)
    )

    // For every non-empty batch: transform the records, forward them to
    // topic2, then checkpoint the consumed offsets. Offsets are saved only
    // AFTER the batch has been processed => at-least-once delivery.
    dstream.foreachRDD { rdd =>
      if (!rdd.isEmpty()) {
        // Grab offset ranges before any transformation, while `rdd` is still
        // the KafkaRDD that carries them.
        val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd.foreachPartition(process)
        OffsetsWithRedisUtils.saveOffsetsToRedis(offsetRanges, groupId)
      }
    }

    // Start the job and block until termination.
    ssc.start()
    ssc.awaitTermination()
  }

  /**
   * Processes one partition of Kafka records: reformats each record value
   * via `parse` and sends every non-empty result to topic2.
   *
   * Fix vs. the original: one producer is created per PARTITION instead of
   * one per record (producer construction is expensive), and it is closed in
   * a `finally`, which also flushes in-flight sends. The previous version
   * returned the already-exhausted input iterator, which was meaningless;
   * the method now returns Unit, matching what `foreachPartition` expects.
   */
  def process(iter: Iterator[ConsumerRecord[String, String]]): Unit = {
    if (iter.hasNext) {
      val producer = new KafkaProducer[String, String](getKafkaProducerParameters())
      try {
        iter.map(record => parse(record.value()))
          .filter(_.nonEmpty)
          .foreach(line => producer.send(new ProducerRecord[String, String]("topic2", line)))
      } finally {
        // close() blocks until previously sent records complete, then
        // releases the producer's resources (fixes the original leak).
        producer.close()
      }
    }
  }

  /**
   * Reformats one input line: a valid line has exactly 15 comma-separated
   * fields and is re-joined with '|'; anything else (wrong field count or a
   * parsing failure) yields "" so callers can filter it out.
   */
  def parse(text: String): String = {
    try {
      val arr: Array[String] = text.split(",")
      // Single expression instead of an early `return` (non-local return is
      // a Scala anti-pattern, especially inside closures).
      if (arr.length == 15) arr.mkString("|") else ""
    } catch {
      case e: Exception =>
        log.error("解析出错！", e)
        ""
    }
  }

  /**
   * Sends a single message to `topic` with a short-lived producer.
   *
   * Kept for one-off sends; batch code paths should reuse a producer per
   * partition (see `process`). The producer is now closed in a `finally`,
   * fixing the original leak and guaranteeing the record is actually
   * flushed before returning. Still returns the send's Future, as before.
   */
  def sendMsg(msg: String, topic: String) = {
    val producer = new KafkaProducer[String, String](getKafkaProducerParameters())
    try producer.send(new ProducerRecord[String, String](topic, msg))
    finally producer.close()
  }

  /** Kafka consumer configuration for the given consumer group. */
  def getKafkaConsumerParameter(groupId: String): Map[String, Object] = {
    Map[String, Object](
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> "server1:9092",
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
      ConsumerConfig.GROUP_ID_CONFIG -> groupId,
      // Offsets are managed manually in Redis, so Kafka auto-commit is off.
      ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG -> (false: java.lang.Boolean),
      // Only used when Redis holds no offset for a partition yet.
      ConsumerConfig.AUTO_OFFSET_RESET_CONFIG -> "earliest"
    )
  }

  /** Kafka producer configuration (String key/value serializers). */
  def getKafkaProducerParameters(): Properties = {
    val prop = new Properties()
    prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "server1:9092")
    prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
    prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
    prop
  }
}
