package com.zlm.realtime.dim

import com.alibaba.fastjson.JSON
import com.zlm.realtime.bean.BaseTrademark
import com.zlm.realtime.utils.{MyKafkaUtils, MyOffsetUtils, MyPropertiesUtils}
import org.apache.hadoop.conf.Configuration
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Author: Harbour
 * Date: 2021-04-18 22:22
 * Desc: Streaming app that consumes trademark dimension records from the
 *       Kafka topic "ods_base_trademark" and persists them to HBase via Phoenix,
 *       tracking consumer offsets manually in Redis.
 */
object SingleTrademarkApp {
    def main(args: Array[String]): Unit = {
        // step 1. Build the streaming context with a 5-second micro-batch interval.
        // NOTE: app name now matches the object name (was "BaseTrademarkApp",
        // a leftover from the file this one was copied from).
        val conf: SparkConf = new SparkConf().setAppName("SingleTrademarkApp").setMaster("local[*]")
        val ssc = new StreamingContext(conf, Seconds(5))

        // step 2. Open the Kafka input stream, resuming from offsets previously
        // saved in Redis so restarts do not lose or skip records.
        val topic = "ods_base_trademark"
        val groupId = "dim_base_trademark_group"

        val offsetMap: Map[TopicPartition, Long] = MyOffsetUtils.getOffsetFromRedis(topic, groupId)
        val kafkaInputDStream: InputDStream[ConsumerRecord[String, String]] =
            MyKafkaUtils.getKafkaStream(topic, ssc, offsetMap, groupId)

        // step 3. Capture each batch's offset ranges on the driver BEFORE any
        // further transformation: HasOffsetRanges is only implemented by the
        // initial KafkaRDD, so the cast must happen here.
        var offsetRanges = Array.empty[OffsetRange]
        kafkaInputDStream.transform(
            (kafkaRDD: RDD[ConsumerRecord[String, String]]) => {
                offsetRanges = kafkaRDD.asInstanceOf[HasOffsetRanges].offsetRanges
                kafkaRDD
            }
        ).foreachRDD( // step 4. Persist each batch to HBase through Phoenix.
            (kafkaRDD: RDD[ConsumerRecord[String, String]]) => {
                // Skip the Phoenix/ZooKeeper round-trip entirely for empty
                // micro-batches (common with a 5-second interval).
                if (!kafkaRDD.isEmpty()) {
                    import org.apache.phoenix.spark._
                    kafkaRDD.map(
                        // Deserialize the raw JSON payload into the dimension bean.
                        (record: ConsumerRecord[String, String]) => {
                            JSON.parseObject(record.value(), classOf[BaseTrademark])
                        }
                    ).saveToPhoenix(
                        tableName = "MALL_BASE_TRADEMARK",
                        Seq("ID", "TM_NAME"),
                        new Configuration,
                        zkUrl = Some(MyPropertiesUtils.getProperty("zk.host"))
                    )
                }

                // Commit offsets only AFTER the write succeeds: if the batch
                // fails before this line, it is replayed on restart
                // (at-least-once delivery).
                MyOffsetUtils.saveOffsetToRedis(topic, groupId, offsetRanges)
            }
        )

        ssc.start()
        ssc.awaitTermination()
    }
}
