package com.atguigu.gmall.realtime.app.newapp

import com.atguigu.gmall.realtime.app.newapp.DwCommentApp.sparkConf
import com.atguigu.gmall.realtime.bean.{CommentInfo, FavorInfo}
import com.atguigu.gmall.realtime.handler.{CommonApp, DimHandler, EsHandler}
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream

import java.time.LocalDate

// favor_info joined with the sku (product) and user dimension tables
// DwFavorApp: consumes DWD favor_info events from Kafka, derives date/hour
// partitioning fields, enriches each record with user and sku dimension data,
// and writes the result to Elasticsearch, committing Kafka offsets afterwards.
object DwFavorApp extends CommonApp {

  def main(args: Array[String]): Unit = {
    val groupId: String = "dw_favor_info_app"
    val topic: String = "DWD_FAVOR_INFO_I"
    // 5-second micro-batches. NOTE(review): "local[4]" is hard-coded — make the
    // master configurable before deploying to a cluster.
    val ssc: StreamingContext = initSsc(groupId, "local[4]", 5)

    val dStream: DStream[FavorInfo] = getDstream[FavorInfo](ssc, topic, groupId)

    // Split create_time (assumed "yyyy-MM-dd HH:mm:ss" — TODO confirm upstream
    // format) into create_date and create_hour. Malformed timestamps without a
    // space/colon will throw ArrayIndexOutOfBoundsException at runtime.
    val dtDstream: DStream[FavorInfo] = dStream.map { favorInfo =>
      val createTimeArr: Array[String] = favorInfo.create_time.split(" ")
      favorInfo.create_date = createTimeArr(0)
      favorInfo.create_hour = createTimeArr(1).split(":")(0)
      favorInfo
    }

    // Enrich with the user dimension, keyed by user_id.
    val dataWithUDstream: DStream[FavorInfo] =
      DimHandler.joinUser[FavorInfo](dtDstream, _.user_id.toString)

    // Enrich with the sku dimension, keyed by sku_id.
    val dataWithSkuDStream: DStream[FavorInfo] =
      DimHandler.joinSku[FavorInfo](dataWithUDstream, _.sku_id.toString)

    // Cache: the enriched stream is consumed twice (debug print + ES sink),
    // so caching avoids recomputing both dimension joins.
    dataWithSkuDStream.cache()
    dataWithSkuDStream.print(1000)

    // Save each batch to ES (index prefix presumably gets a date suffix inside
    // EsHandler — confirm), using "id" as the document id for idempotent
    // writes, then commit Kafka offsets after a successful write.
    // Fixed: the original callback was written `{ Unit => ... }`, which binds
    // a parameter literally named `Unit` (shadowing the Unit type) rather than
    // declaring a unit-argument function — `_ =>` is the intended form.
    EsHandler.saveDStreamToEs[FavorInfo](dataWithSkuDStream,
      "gmall2022_favor_info_",
      "id",
      { _ => this.commitAllOffsetKafka() }
    )

    ssc.start()
    ssc.awaitTermination()
  }

}
