package com.zlm.realtime.ods

import com.alibaba.fastjson.{JSON, JSONObject}
import com.zlm.realtime.utils.{MyKafkaSinkUtils, MyKafkaUtils, MyOffsetUtils}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Author: Harbour
 * Date: 2021-04-16 14:14
 * Desc: Maxwell-based Kafka CDC splitter: reads the raw maxwell change-log
 *       topic and fans each table's rows out to its own "ods_<table>" topic.
 */
object BaseDBMaxwellApp {

    /** Dimension tables forwarded for every operation type. */
    private val dimTables: Set[String] = Set(
        "base_province", "user_info", "sku_info",
        "spu_info", "base_trademark", "base_category3"
    )

    /**
     * Fact tables forwarded only for "insert" operations. Maxwell bootstrap
     * rows arrive with type "bootstrap-insert" and are intentionally excluded
     * by the exact "insert" match.
     */
    private val insertOnlyTables: Set[String] = Set("order_info", "order_detail")

    def main(args: Array[String]): Unit = {

        val conf: SparkConf = new SparkConf().setAppName("MaxwellAPP").setMaster("local[*]")
        val ssc = new StreamingContext(conf, Seconds(2))

        val topic = "mall-db-maxwell"
        val groupId = "mall-db-maxwell-group"

        // Resume from the offsets persisted in Redis; an empty map lets the
        // consumer fall back to its configured auto-offset-reset policy.
        val offsetMap: Map[TopicPartition, Long] = MyOffsetUtils.getOffsetFromRedis(topic, groupId)

        // Captured on the driver inside transform() so the same batch's ranges
        // can be committed after its output action completes.
        var offsetRanges: Array[OffsetRange] = Array.empty[OffsetRange]

        val kafkaInputDStream: InputDStream[ConsumerRecord[String, String]] =
            MyKafkaUtils.getKafkaStream(topic, ssc, offsetMap, groupId)

        kafkaInputDStream.transform(
            (kafkaRDD: RDD[ConsumerRecord[String, String]]) => {
                // transform() runs its closure on the driver each batch, which is
                // what lets us stash the offset ranges for the commit below.
                offsetRanges = kafkaRDD.asInstanceOf[HasOffsetRanges].offsetRanges
                kafkaRDD
            }
        ).mapPartitions(
            (recordIter: Iterator[ConsumerRecord[String, String]]) => {
                recordIter
                    // Guard against tombstone/null-valued records: parseObject(null)
                    // would yield null and NPE downstream.
                    .filter((record: ConsumerRecord[String, String]) => record.value() != null)
                    .map((record: ConsumerRecord[String, String]) => JSON.parseObject(record.value()))
            }
        ).foreachRDD(
            (rdd: RDD[JSONObject]) => {
                rdd.foreach(
                    (jsonObj: JSONObject) => {
                        // Maxwell bootstrap wraps the data with start/complete markers
                        // whose "data" payload is empty, so guard before routing.
                        val msg: JSONObject = jsonObj.getJSONObject("data")
                        val dataType: String = jsonObj.getString("type")
                        if (msg != null && !msg.isEmpty) {
                            val tableName: String = jsonObj.getString("table")
                            // Set.contains is null-safe; the original equals-chain
                            // could NPE when "table" was absent from the payload.
                            val shouldForward: Boolean =
                                dimTables.contains(tableName) ||
                                    (insertOnlyTables.contains(tableName) && "insert".equals(dataType))
                            if (shouldForward) {
                                MyKafkaSinkUtils.send("ods_" + tableName, msg.toString)
                            }
                        }
                    }
                )
                // Commit offsets only after the batch's output action has finished,
                // giving at-least-once delivery into the ods_* topics.
                MyOffsetUtils.saveOffsetToRedis(topic, groupId, offsetRanges)
            }
        )

        ssc.start()
        ssc.awaitTermination()
    }
}
