package com.example.homework

import java.util.Properties

import com.example.sparkkafka.OffsetsRedisUtils
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, HasOffsetRanges, KafkaUtils, LocationStrategies, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo job: loads `data/sample.log` into a source Kafka topic, then runs a
 * Spark Streaming consumer that reformats each record (comma -> pipe
 * delimited) and republishes it to a destination topic. Consumer offsets are
 * checkpointed to Redis via [[OffsetsRedisUtils]] so the job can resume where
 * it left off.
 */
object Test1 {
  // Silence Spark/Kafka internals; this is a demo, only our println output matters.
  Logger.getLogger("org").setLevel(Level.ERROR)

  private val conf = new SparkConf()
    .setAppName(this.getClass.getCanonicalName)
    .setMaster("local[*]")

  // 5-second micro-batches.
  val ssc = new StreamingContext(conf, Seconds(5))
  private val sourceTopic = "mytopic1"
  private val dstTopic = "mytopic2"

  def main(args: Array[String]): Unit = {
    readFileAndSendToKafka(sourceTopic)
    // Blocks forever in awaitTermination; stop() intentionally left commented out.
    consumerDataFromKafkaAndWriteToOtherTopic(sourceTopic, dstTopic)
//    ssc.stop()
  }

  /**
   * Reads `data/sample.log` and publishes every line to `topic`.
   *
   * One `KafkaProducer` is created per partition (producers are expensive and
   * not serializable, so they must be built inside `foreachPartition`).
   *
   * @param topic destination Kafka topic for the raw file lines
   */
  def readFileAndSendToKafka(topic: String): Unit = {
    // NOTE(review): path is relative to the driver's working directory.
    val lines: RDD[String] = ssc.sparkContext.textFile("data/sample.log")

    // Producer config (Properties is Serializable, safe to capture in the closure).
    val prop = getKafkaProducerParams()

    lines.foreachPartition { iter =>
      val producer = new KafkaProducer[String, String](prop)
      try {
        iter.foreach { line =>
          producer.send(new ProducerRecord[String, String](topic, line))
        }
      } finally {
        // close() flushes all pending async sends; previously a failure in the
        // loop would have leaked the producer and possibly dropped messages.
        producer.close()
      }
    }

    println(s"成功将数据写入kafka的topic:${topic}中")
    // count/partitions are separate actions and re-read the file; acceptable for a demo.
    println(s"topic:${topic} 共${lines.count}条数据，${lines.partitions.length} 个partitions")
  }

  /**
   * Consumes `sourceTopic`, reformats each record via [[parse]], writes the
   * result to `dstTopic`, and persists the consumed offset ranges to Redis.
   * Starts the streaming context and never returns (awaitTermination).
   *
   * @param sourceTopic topic to consume from
   * @param dstTopic    topic the transformed records are written to
   */
  def consumerDataFromKafkaAndWriteToOtherTopic(sourceTopic: String, dstTopic: String): Unit = {
    val topics: Array[String] = Array(sourceTopic)
    val groupid = "myConsumerGroup"
    val kafkaParams: Map[String, Object] = getKafkaConsumerParams(groupid)
    // Resume from the offsets previously checkpointed to Redis (auto-commit is off).
    val fromOffsets = OffsetsRedisUtils.getOffsetFromRedis(topics, groupid)

    val dstream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream(
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](topics, kafkaParams, fromOffsets)
    )

    dstream.foreachRDD { rdd =>
      if (!rdd.isEmpty) {
        // Capture offsets BEFORE processing: the direct-stream RDD carries them
        // (standard HasOffsetRanges cast from the Spark Kafka integration guide).
        val offsetRanges: Array[OffsetRange] = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd.foreachPartition(process)
        // At-least-once semantics: offsets are saved only after processing succeeds.
        OffsetsRedisUtils.saveOffsetsToRedis(offsetRanges, groupid)
      } else {
        println("kafka中的数据已处理完毕，等待生产者生产数据")
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }

  /**
   * Per-partition handler: reformats each record and forwards it to [[dstTopic]].
   *
   * Fix: a single producer is created per partition and closed in `finally`.
   * The previous version called [[sendMsg2Topic]] per record, building (and
   * leaking) one `KafkaProducer` for every message without ever flushing it.
   */
  def process(iter: Iterator[ConsumerRecord[String, String]]): Unit = {
    println(s"准备将数据写入${dstTopic}中")
    val producer = new KafkaProducer[String, String](getKafkaProducerParams())
    try {
      iter.map(record => parse(record.value))
//      .filter(!_.isEmpty)
        .foreach(line => producer.send(new ProducerRecord[String, String](dstTopic, line)))
    } finally {
      producer.close() // flushes pending async sends
    }
  }

  /**
   * Sends a single message to `topic`. Kept for API compatibility, but prefer
   * a per-partition producer (see [[process]]) for anything high-volume:
   * producer construction is expensive.
   */
  def sendMsg2Topic(msg: String, topic: String): Unit = {
    val producer = new KafkaProducer[String, String](getKafkaProducerParams())
    try {
      producer.send(new ProducerRecord[String, String](topic, msg))
    } finally {
      // close() flushes the async send; previously the producer was leaked and
      // the un-flushed message could be lost.
      producer.close()
    }
  }

  /**
   * Reformats a record: strips the `<<<!>>>` marker and converts the
   * comma-separated payload to pipe-separated.
   *
   * @return the reformatted line, or "" when parsing throws
   */
  def parse(text: String): String = {
    import scala.util.control.NonFatal
    try {
      val arr = text.replace("<<<!>>>", "").split(",")
//      if (arr.length != 15) return ""
      arr.mkString("|")
    } catch {
      // NonFatal instead of Exception: never swallow fatal errors (OOM, etc.).
      case NonFatal(e) =>
        println(s"解析数据出错！错误信息:${e.getMessage}")
        ""
    }
  }

  /** Kafka consumer config: manual offset management (auto-commit disabled). */
  def getKafkaConsumerParams(groupId: String): Map[String, Object] = {
    Map[String, Object](
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> "node1:9092,node2:9092,node3:9092",
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
      ConsumerConfig.GROUP_ID_CONFIG -> groupId,
      ConsumerConfig.AUTO_OFFSET_RESET_CONFIG -> "earliest",
      // Offsets are committed to Redis by hand, so Kafka auto-commit is off.
      ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG -> (false: java.lang.Boolean))
  }

  /** Kafka producer config (String key/value serializers). */
  def getKafkaProducerParams(): Properties = {
    val prop = new Properties()
    prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "node1:9092,node2:9092,node3:9092")
    prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
    prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
    prop
  }

}
