package com.atguigu.gmall.realtime.app.newapp

import com.atguigu.gmall.realtime.app.newapp.DwCartInfoApp.sparkConf
import com.atguigu.gmall.realtime.bean.{CartInfo, CommentInfo}
import com.atguigu.gmall.realtime.handler.{CommonApp, DimHandler, EsHandler}
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream

import java.time.LocalDate

// comment_info joined with the sku (product) dimension table and the user dimension table
object DwCommentApp extends CommonApp {

  /**
   * Streaming job: consumes comment facts from Kafka topic DWD_COMMENT_INFO_I,
   * derives date/hour fields from `create_time`, enriches each record with the
   * user dimension, the sku dimension and the appraise dictionary name, then
   * writes the result to a daily Elasticsearch index. Kafka offsets are
   * committed only after a successful ES write (at-least-once semantics).
   */
  def main(args: Array[String]): Unit = {
    val groupId: String = "dw_comment_info_app"
    val topic: String = "DWD_COMMENT_INFO_I"
    // 4 local cores, 5-second micro-batches (initSsc is provided by CommonApp).
    val ssc: StreamingContext = initSsc(groupId, "local[4]", 5)

    val commentInfoDStream: DStream[CommentInfo] = getDstream[CommentInfo](ssc, topic, groupId)

    // Split "yyyy-MM-dd HH:mm:ss" create_time into create_date and create_hour.
    // Guard the time part so a single malformed record (date only, no time)
    // cannot throw ArrayIndexOutOfBoundsException and fail the whole batch.
    val dtDstream: DStream[CommentInfo] = commentInfoDStream.map { commentInfo =>
      val createTimeArr: Array[String] = commentInfo.create_time.split(" ")
      commentInfo.create_date = createTimeArr(0)
      commentInfo.create_hour =
        if (createTimeArr.length > 1) createTimeArr(1).split(":")(0) else ""
      commentInfo
    }

    // Dimension enrichment: user -> sku -> appraise dictionary name.
    val commentInfoWithUDstream: DStream[CommentInfo] =
      DimHandler.joinUser[CommentInfo](dtDstream, _.user_id.toString)

    val commentInfoWithSkuDStream: DStream[CommentInfo] =
      DimHandler.joinSku[CommentInfo](commentInfoWithUDstream, _.sku_id.toString)

    val commentInfoWithAppraiseDStream: DStream[CommentInfo] =
      DimHandler.joinBaseDic[CommentInfo](
        commentInfoWithSkuDStream,
        _.appraise,
        (commentInfo, dicName) => commentInfo.appraise_name = dicName
      )

    // Cached because the stream is consumed twice: debug print + ES sink.
    commentInfoWithAppraiseDStream.cache()
    commentInfoWithAppraiseDStream.print(1000)

    // Index name is a prefix; the handler presumably appends the date suffix —
    // NOTE(review): confirm against EsHandler.
    // Fixed: the callback parameter was named `Unit`, shadowing the Unit type;
    // use the placeholder `_` for the ignored argument instead.
    EsHandler.saveDStreamToEs[CommentInfo](
      commentInfoWithAppraiseDStream,
      "gmall2022_comment_info_",
      "id",
      _ => this.commitAllOffsetKafka()
    )

    ssc.start()
    ssc.awaitTermination()
  }

}
