package com.zlm.realtime.ods

import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import com.zlm.realtime.utils.{MyKafkaSinkUtils, MyKafkaUtils, MyOffsetUtils}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}


/**
 * Consumes MySQL binlog change events (published by Canal to the
 * `mall-db-canal` Kafka topic), and re-publishes each INSERT row to a
 * per-table ODS Kafka topic (`ods_<table>`).
 *
 * Offset management: offsets are read from Redis on startup and committed
 * back to Redis once per batch, on the driver, after the batch has been
 * fully processed (manual at-least-once semantics).
 *
 * @author Harbour
 * @date 2021-04-09 14:38
 */
object BaseDBCanalApp {
    def main(args: Array[String]): Unit = {

        val sparkConf: SparkConf = new SparkConf().setAppName("canal").setMaster("local[*]")
        val ssc = new StreamingContext(sparkConf, Seconds(2))

        val topic = "mall-db-canal"
        val groupId = "mall-db-canal-group"

        // Resume from previously committed offsets (if any) stored in Redis.
        val offsetMap: Map[TopicPartition, Long] = MyOffsetUtils.getOffsetFromRedis(topic, groupId)
        val kafkaRecordStream: InputDStream[ConsumerRecord[String, String]] = MyKafkaUtils.getKafkaStream(topic, ssc, offsetMap, groupId)

        // Updated on the DRIVER each batch inside transform(); read back on the
        // driver inside foreachRDD once the batch's output action is done.
        var offsetRanges: Array[OffsetRange] = Array.empty
        kafkaRecordStream.transform(
            /* step: capture the batch's offset ranges on the driver.
             * This must be the first operation on the direct stream — only the
             * underlying KafkaRDD implements HasOffsetRanges. */
            (rdd: RDD[ConsumerRecord[String, String]]) => {
                offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
                rdd
            }
        ).map(
            /* step: parse each Kafka record's value into a Canal JSON object */
            (record: ConsumerRecord[String, String]) => JSON.parseObject(record.value())
        ).foreachRDD(
            /* step: forward INSERT rows to per-table ODS topics, then commit offsets */
            (rdd: RDD[JSONObject]) => {
                // foreachPartition instead of foreach: sink setup/teardown happens
                // once per partition rather than once per record.
                rdd.foreachPartition(
                    (jsonIter: Iterator[JSONObject]) => {
                        import scala.collection.JavaConverters._
                        for (jsonObj <- jsonIter) {
                            val dataType: String = jsonObj.getString("type")
                            if ("INSERT".equalsIgnoreCase(dataType)) {
                                val dataTopic: String = "ods_" + jsonObj.getString("table")
                                // "data" can be absent on DDL/heartbeat events; guard null.
                                val dataArr: JSONArray = jsonObj.getJSONArray("data")
                                if (dataArr != null) {
                                    for (data <- dataArr.asScala) {
                                        MyKafkaSinkUtils.send(dataTopic, data.toString)
                                    }
                                }
                            }
                        }
                    }
                )
                // BUG FIX: commit offsets ONCE per batch on the DRIVER, after the
                // batch has been processed. The original called this inside
                // rdd.foreach — i.e. once per record, on the executors, where the
                // serialized closure copy of `offsetRanges` is stale/empty — so
                // offsets were never correctly persisted.
                MyOffsetUtils.saveOffsetToRedis(topic, groupId, offsetRanges)
            }
        )

        ssc.start()
        ssc.awaitTermination()
    }
}
