package com.mjf.gmall.realtime.ods

import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import com.mjf.gmall.realtime.util.{MyKafkaSink, MyKafkaUtil, OffsetManager}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}

/**
 * Reads MySQL change events (synced by Canal into Kafka), splits them by
 * source table, and forwards each row to a per-table ODS Kafka topic.
 *
 * Stack: zookeeper / kafka / redis / canal
 *
 * Offset handling: offsets are committed to Redis AFTER the batch's data is
 * sent, giving at-least-once delivery (duplicates possible on re-run).
 */
object BaseDbCanal {

  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf().setMaster("local[4]").setAppName("base_db_canal_app")

    val ssc: StreamingContext = new StreamingContext(conf, Seconds(5))

    val topic = "gmall0105_db_c"
    val groupId = "base_db_canal_group"

    // Load the last committed offsets for this (topic, group) from Redis.
    val kafkaOffsetMap: Map[TopicPartition, Long] = OffsetManager.getOffset(topic, groupId)

    // Resume from the saved offsets when they exist; otherwise start a fresh stream.
    // BUG FIX: the original condition was `kafkaOffsetMap == null && kafkaOffsetMap.size > 0`,
    // which NPEs when the map is null and is always false otherwise — saved offsets
    // were never actually used. Corrected to `!= null && nonEmpty`.
    val recordInputStream: InputDStream[ConsumerRecord[String, String]] =
      if (kafkaOffsetMap != null && kafkaOffsetMap.nonEmpty) {
        // Redis already holds offset information for this group.
        MyKafkaUtil.getKafkaStream(topic, ssc, kafkaOffsetMap, groupId)
      } else {
        MyKafkaUtil.getKafkaStream(topic, ssc, groupId)
      }

    // Capture each batch's offset ranges so they can be committed to Redis after
    // processing. The var is declared outside the closure (runs once at startup);
    // the assignment inside `transform` runs once per batch on the driver.
    var offsetRanges: Array[OffsetRange] = Array.empty[OffsetRange]
    val inputGetOffsetDstream: DStream[ConsumerRecord[String, String]] = recordInputStream.transform {
      rdd =>
        offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd
    }

    // Parse each record's value (a Canal JSON envelope) into a JSONObject.
    val jsonObjDStream: DStream[JSONObject] = inputGetOffsetDstream.map {
      record => JSON.parseObject(record.value())
    }

    // Fan the rows out to per-table ODS topics ("ods_" + table name).
    jsonObjDStream.foreachRDD {
      rdd =>
        rdd.foreach {
          jsonObj =>
            val jsonArr: JSONArray = jsonObj.getJSONArray("data")
            val tableName: String = jsonObj.getString("table")
            // Renamed from `topic`: the original shadowed the outer source-topic
            // val that is still needed below for the offset commit.
            val sinkTopic: String = "ods_" + tableName

            // JavaConverters replaces the deprecated implicit JavaConversions.
            import scala.collection.JavaConverters._
            for (row <- jsonArr.asScala) {
              // send() is not idempotent: a replayed batch may produce duplicates.
              MyKafkaSink.send(sinkTopic, row.toString)
            }
        }

        // Batch fully processed — commit this batch's offsets to Redis.
        OffsetManager.saveOffset(topic, groupId, offsetRanges)
    }

    ssc.start()
    ssc.awaitTermination()

  }

}
