package com.zlm.realtime.ads

import com.alibaba.fastjson.JSON
import com.zlm.realtime.bean.OrderWide
import com.zlm.realtime.utils.{MyKafkaUtils, MyOffsetUtils}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import scalikejdbc.config.DBs
import scalikejdbc.{DB, SQL}

import java.text.SimpleDateFormat
import java.util.Date

/**
 * Author: Harbour
 * Date: 2021-04-23 22:59
 * Desc: Computes per-trademark order-amount statistics from the dws_order_wide
 *       Kafka topic and writes them to MySQL for the ADS-layer report /
 *       visualization.
 *
 * Exactly-once strategy: the aggregated rows and the consumed Kafka offsets
 * are written in ONE local MySQL transaction (DB.localTx), so after a crash
 * the batch is either fully applied or fully replayed from the stored offsets.
 */
object TrademarkStatusApp {
    def main(args: Array[String]): Unit = {
        val conf: SparkConf = new SparkConf().setAppName("TrademarkStatusApp").setMaster("local[*]")
        val ssc = new StreamingContext(conf, Seconds(5))

        val topic = "dws_order_wide"
        val groupId = "ads_trademark_status_group"

        // Offset ranges of the current micro-batch, captured in transform()
        // below and committed to MySQL together with the statistics.
        var offsetRanges: Array[OffsetRange] = Array.empty[OffsetRange]
        // Resume consumption from the offsets of the last successful transaction.
        val offsetMap: Map[TopicPartition, Long] = MyOffsetUtils.getOffsetFromMySQL(topic, groupId)

        // Load the scalikejdbc connection-pool configuration once on the driver.
        // (The foreachRDD body also runs on the driver, so re-running setup()
        // there every 5-second batch was redundant.)
        DBs.setup()

        MyKafkaUtils.getKafkaStream(topic, ssc, offsetMap, groupId).transform(
            (rdd: RDD[ConsumerRecord[String, String]]) => {
                // Capture the offset ranges before any shuffle discards them.
                offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
                rdd
            }
        ).map(
            // Parse each record into an OrderWide and key it by "tmId_tmName".
            (record: ConsumerRecord[String, String]) => {
                val orderWide: OrderWide = JSON.parseObject(record.value(), classOf[OrderWide])
                (orderWide.tm_id + "_" + orderWide.tm_name, orderWide.final_detail_amount)
            }
        ).reduceByKey(
            _ + _
        ).foreachRDD(

            (rdd: RDD[(String, Double)]) => {
                // collect() brings the (small, already-aggregated) result to the
                // driver so the insert and the offset commit can share a single
                // local transaction instead of requiring a distributed one.
                // collect() never returns null, so only nonEmpty is checked.
                val trademarkArr: Array[(String, Double)] = rdd.collect()

                if (trademarkArr.nonEmpty) {
                    DB.localTx(
                        implicit session => {
                            val date: String = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date())
                            val paramList: List[Seq[Any]] = trademarkArr.toList.map(
                                (e: (String, Double)) => {
                                    /**
                                    `stat_time` datetime,
                                    `trademark_id` varchar(20),
                                    `trademark_name` varchar(200),
                                    `amount` decimal(16,2),
                                     */
                                    // Split with limit 2: a trademark name that itself
                                    // contains '_' must not be truncated at its first
                                    // underscore, and an empty name must not throw.
                                    // (The id part before the first '_' is unaffected.)
                                    val Array(tmId, tmName) = e._1.split("_", 2)
                                    Seq(
                                        date,
                                        tmId, // id
                                        tmName, // name
                                        Math.round(e._2 * 100D) / 100D // amount rounded to 2 decimals
                                    )
                                }
                            )
                            SQL("insert into trademark_amount_stat values(?,?,?,?)")
                              .batch(paramList.toSeq: _*).apply()

                            // Commit offsets in the SAME transaction as the data,
                            // so data and offsets can never diverge.
                            for (offsetRange <- offsetRanges) {
                                SQL("replace into tab_offset values(?,?,?,?)").bind(
                                    groupId,
                                    topic,
                                    offsetRange.partition,
                                    offsetRange.untilOffset
                                ).update().apply()
                            }
                        }
                    )
                }
            }
        )

        ssc.start()
        ssc.awaitTermination()
    }
}
