package com.zlm.realtime.dim

import com.alibaba.fastjson.{JSON, JSONObject}
import com.zlm.realtime.bean.SKUInfo
import com.zlm.realtime.utils.{MyKafkaUtils, MyOffsetUtils, MyPhoenixUtils, MyPropertiesUtils}
import org.apache.hadoop.conf.Configuration
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.collection.mutable.ListBuffer

/**
 * Author: Harbour
 * Date: 2021-04-19 9:17
 * Desc: 由于商标信息、SKU、SPU、分类信息等，不会经常发生变化，因此可以聚合起来，进行维度退化，保存到HBase
 */
object BaseSKUInfoAPP {
    def main(args: Array[String]): Unit = {
        // step 1. Build the streaming context (appName kept consistent with the object name).
        val conf: SparkConf = new SparkConf().setAppName("BaseSKUInfoAPP").setMaster("local[*]")
        val ssc = new StreamingContext(conf, Seconds(5))

        // step 2. Create the Kafka input stream, resuming from offsets previously saved in Redis.
        val topic = "ods_sku_info"
        val groupId = "dim_sku_info_group"

        val offsetMap: Map[TopicPartition, Long] = MyOffsetUtils.getOffsetFromRedis(topic, groupId)
        val kafkaInputDStream: InputDStream[ConsumerRecord[String, String]] =
            MyKafkaUtils.getKafkaStream(topic, ssc, offsetMap, groupId)

        // step 3. Capture each batch's offset ranges before any downstream transformation,
        // then deserialize every Kafka record value into a SKUInfo bean.
        var offsetRanges = Array.empty[OffsetRange]
        val skuInfoDStream: DStream[SKUInfo] = kafkaInputDStream.transform(
            (kafkaRDD: RDD[ConsumerRecord[String, String]]) => {
                offsetRanges = kafkaRDD.asInstanceOf[HasOffsetRanges].offsetRanges
                kafkaRDD
            }
        ).map(
            (record: ConsumerRecord[String, String]) =>
                JSON.parseObject(record.value(), classOf[SKUInfo])
        )

        // step 4. Per batch: query the trademark / category3 / SPU dimension tables once on
        // the driver, broadcast them, and join their names onto every SKU record.
        val joinedSKUInfoDStream: DStream[SKUInfo] = skuInfoDStream.transform(
            (skuInfoRDD: RDD[SKUInfo]) => {
                // isEmpty() short-circuits on the first element — cheaper than count() > 0,
                // which scans the whole batch.
                if (!skuInfoRDD.isEmpty()) {
                    val bcMapList: Broadcast[List[Map[String, JSONObject]]] =
                        ssc.sparkContext.broadcast(List(getTradeMarkMap, getCategory3Map, getSPUInfoMap))

                    skuInfoRDD.mapPartitions(
                        (skuInfoIter: Iterator[SKUInfo]) =>
                            skuInfoListCorrelateToList(skuInfoIter.toList, bcMapList.value).iterator
                    )
                } else {
                    skuInfoRDD
                }
            }
        )

        // step 5. Persist each joined batch to HBase through Phoenix, then commit offsets.
        // Offsets are written only after a successful save, giving at-least-once semantics.
        joinedSKUInfoDStream.foreachRDD(
            (skuInfoRDD: RDD[SKUInfo]) => {
                import org.apache.phoenix.spark._
                skuInfoRDD.saveToPhoenix(
                    tableName = "mall_sku_info",
                    Seq("ID", "SPU_ID", "PRICE", "SKU_NAME", "TM_ID", "CATEGORY3_ID", "CREATE_TIME", "CATEGORY3_NAME", "SPU_NAME", "TM_NAME"),
                    new Configuration,
                    zkUrl = Some(MyPropertiesUtils.getProperty("zk.host"))
                )
                MyOffsetUtils.saveOffsetToRedis(topic, groupId, offsetRanges)
            }
        )

        ssc.start()
        ssc.awaitTermination()
    }

    /** Runs `sql` through Phoenix and keys every returned row by its "ID" column. */
    private def queryToIdMap(sql: String): Map[String, JSONObject] = {
        val rows: ListBuffer[JSONObject] = MyPhoenixUtils.queryAll(sql)
        rows.map((row: JSONObject) => (row.getString("ID"), row)).toMap
    }

    /** Trademark dimension lookup: id -> row containing ID and TM_NAME. */
    def getTradeMarkMap: Map[String, JSONObject] =
        queryToIdMap("select id, tm_name from mall_base_trademark")

    /** SPU dimension lookup: id -> row containing ID and SPU_NAME. */
    def getSPUInfoMap: Map[String, JSONObject] =
        queryToIdMap("select id, spu_name from mall_spu_info")

    /** Level-3 category dimension lookup: id -> row containing ID, NAME and CATEGORY2_ID. */
    def getCategory3Map: Map[String, JSONObject] =
        queryToIdMap("select id, name, category2_id from mall_base_category3")

    /**
     * Enriches every SKUInfo in place with its trademark name, category3 name and SPU name.
     *
     * @param skuInfoList SKU records to enrich (the beans are mutated in place)
     * @param infoList    dimension lookups, expected order: trademark, category3, SPU
     * @return the same list, with name fields filled wherever a dimension match was found
     */
    def skuInfoListCorrelateToList(skuInfoList: List[SKUInfo], infoList: List[Map[String, JSONObject]]): List[SKUInfo] = {
        // step 1: unpack the broadcast dimension maps by position.
        val tradeMap: Map[String, JSONObject] = infoList.head
        val cateMap: Map[String, JSONObject] = infoList(1)
        val spuMap: Map[String, JSONObject] = infoList(2)

        // step 2: Option-based lookups replace getOrElse(..., null) + null checks;
        // an unmatched id simply leaves the corresponding name field untouched.
        for (skuInfo <- skuInfoList) {
            tradeMap.get(skuInfo.tm_id).foreach(tm => skuInfo.tm_name = tm.getString("TM_NAME"))
            cateMap.get(skuInfo.category3_id).foreach(c3 => skuInfo.category3_name = c3.getString("NAME"))
            spuMap.get(skuInfo.spu_id).foreach(spu => skuInfo.spu_name = spu.getString("SPU_NAME"))
        }

        // step 3: return the (mutated) input list.
        skuInfoList
    }
}
