package com.atguigu.realtime.dwd

import java.util.Properties

import com.atguigu.realtime.BaseApp
import com.atguigu.realtime.bean.{OrderDetail, SkuInfo}
import com.atguigu.realtime.util.{MyKafkaUtil, OffsetManager}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka010.OffsetRange
import org.json4s.jackson.{JsonMethods, Serialization}

import scala.collection.mutable.ListBuffer

/**
 * Author atguigu
 * Date 2020/11/18 11:30
 */
/**
 * DWD layer job: widens `ods_order_detail` records with the sku dimension
 * table (read from Phoenix each batch) and republishes them to
 * `dwd_order_detail`, committing Kafka offsets after each successful batch.
 */
object DwdOrderDetailApp extends BaseApp {
    override val master: String = "local[2]"
    override val appName: String = "DwdOrderDetail"
    override val groupId: String = "DwdOrderDetail"
    override val topic: String = "ods_order_detail"
    override val bachTime: Int = 3
    
    /**
     * Parses the raw JSON stream into [[OrderDetail]], joins it with the sku
     * dimension table, writes the merged records to Kafka, then saves offsets.
     *
     * @param ssc          active streaming context (used only for its SparkConf)
     * @param sourceStream raw JSON strings consumed from `ods_order_detail`
     * @param offsetRanges offsets of the current batch, persisted after the sink completes
     */
    override def run(ssc: StreamingContext,
                     sourceStream: DStream[String],
                     offsetRanges: ListBuffer[OffsetRange]): Unit = {
        val spark: SparkSession = SparkSession.builder()
            .config(ssc.sparkContext.getConf)
            .getOrCreate()
        import spark.implicits._
        
        sourceStream
            .map(str => {
                // FIX: the previous `f + toLong + toDouble` referenced names that are
                // not in scope anywhere in this file and could not compile. Use
                // DefaultFormats, consistent with the serialization side below.
                // NOTE(review): if OrderDetail has numeric fields encoded as JSON
                // strings, custom Long/Double serializers must be added here — confirm.
                implicit val format: org.json4s.Formats = org.json4s.DefaultFormats
                JsonMethods.parse(str).extract[OrderDetail]
            })
            .transform(rdd => {
                // Re-read the sku dimension table every batch so dimension updates
                // are picked up; keyed by sku id for the join below.
                val skuRDD: RDD[(String, SkuInfo)] = spark
                    .read
                    .jdbc("jdbc:phoenix:hadoop162,hadoop162,hadoop164:2181", "gmall_sku_info", new Properties())
                    .as[SkuInfo]
                    .rdd
                    .map(sku => (sku.id, sku))
                
                rdd
                    .map(detail => (detail.sku_id.toString, detail))
                    .join(skuRDD)
                    .map {
                        // The join key is no longer needed after the join.
                        case (_, (detail, sku)) =>
                            detail.mergeSkuInfo(sku)
                    }
                
            })
            .foreachRDD(rdd => {
                rdd.foreachPartition(it => {
                    val producer: KafkaProducer[String, String] = MyKafkaUtil.getProducer
                    // Build the serialization Formats once per partition instead of
                    // once per record.
                    implicit val f: org.json4s.Formats = org.json4s.DefaultFormats
                    try {
                        it.foreach(detail => {
                            producer.send(new ProducerRecord[String, String]("dwd_order_detail", Serialization.write(detail)))
                        })
                    } finally {
                        // close() flushes buffered records; the finally guarantees the
                        // producer is released even if a send throws.
                        producer.close()
                    }
                })
                
                // Commit offsets only after the whole batch has been written to Kafka.
                OffsetManager.saveOffsets(offsetRanges, groupId, topic)
            })
    }
}
