package com.pw.study.realtime.app

import com.alibaba.fastjson.JSON
import com.google.gson.Gson
import com.pw.study.common.constants.TopicConstant
import com.pw.study.realtime.bean.{OrderBean, OrderDetail, SaleDetail, UserInfo}
import com.pw.study.realtime.handle.{KafkaHandler, RDDHandler, RedisHandler}
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import redis.clients.jedis.Jedis

import java.time.{LocalDate, LocalDateTime}
import java.time.format.DateTimeFormatter
import scala.collection.mutable.ListBuffer

/**
 * Streaming join of order-info and order-detail Kafka topics into wide
 * SaleDetail rows, enriched with user info cached in Redis, then written
 * to a per-day Elasticsearch index. Kafka offsets are committed manually
 * after each batch's ES write succeeds.
 */
object SaleDetailApp extends BaseAPP {
  appName = "saleApp"
  groupName = "saleApp"
  private val orderInfo: Array[String] = Array(TopicConstant.GMALL_ORDER_INFO)
  private val orderDetail: Array[String] = Array(TopicConstant.GMALL_ORDER_DETAIL)

  // DateTimeFormatter is immutable and thread-safe, so build each one once.
  // They live on the object (not as locals inside transform) because
  // DateTimeFormatter is NOT Serializable: executor closures reference object
  // members statically instead of capturing them, avoiding both per-record
  // allocation and Task-not-serializable failures.
  private val dayFormatter: DateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd")
  private val hourFormatter: DateTimeFormatter = DateTimeFormatter.ofPattern("HH")
  private val createTimeFormatter: DateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")

  def main(args: Array[String]): Unit = {
    conf.setAppName(appName)
    // Auto-create the ES index if it does not exist yet.
    conf.set("es.index.auto.create", "true")
    // Elasticsearch cluster nodes.
    conf.set("es.nodes", "hadoop112,hadoop113,hadoop114")
    conf.set("es.port", "9200")
    context = new StreamingContext(conf, Seconds(batchDuration))
    runApp({
      val dsOrderInfo = KafkaHandler.getKafkaStream(orderInfo, context, groupName)
      val dsOrderDetail = KafkaHandler.getKafkaStream(orderDetail, context, groupName)
      // Offset ranges are captured on the driver inside transform() each batch,
      // then committed after the ES write for that same batch.
      var rangesInfo: Array[OffsetRange] = null
      var rangesDetail: Array[OffsetRange] = null

      // Parse order-info JSON into the bean and key by order id; derive the
      // date/hour partition columns from create_time.
      val dsInfo2 = dsOrderInfo.transform(rdd => {
        rangesInfo = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd.map(row => {
          val info = JSON.parseObject(row.value(), classOf[OrderBean])
          val createTime = LocalDateTime.parse(info.create_time, createTimeFormatter)
          info.create_date = createTime.format(dayFormatter)
          info.create_hour = createTime.format(hourFormatter)
          (info.id, info)
        })
      })
      // Parse order-detail JSON and key by its parent order id.
      val dsDetail2 = dsOrderDetail.transform(rdd => {
        rangesDetail = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd.map(row => {
          val detail = JSON.parseObject(row.value(), classOf[OrderDetail])
          (detail.order_id, detail)
        })
      })
      // Full outer join so late-arriving sides can be reconciled via the
      // Redis cache inside the handler.
      val ds = dsInfo2.fullOuterJoin(dsDetail2)
      val ds2: DStream[SaleDetail] = RDDHandler.rddToOrderInfoAndOrderDetail(ds)

      // Enrich each SaleDetail with the user info cached in Redis.
      val ds3: DStream[SaleDetail] = ds2.mapPartitions(partition => {
        val jedis: Jedis = RedisHandler.getJedisClient()
        try {
          // Materialize eagerly: partition.map would be lazy, and the Jedis
          // connection would already be closed by the time Spark consumed
          // the iterator. Collect into a buffer while the client is open.
          val enriched = new ListBuffer[SaleDetail]
          partition.foreach(saleDetail => {
            val userStr = jedis.get("userinfo:" + saleDetail.user_id)
            println(s"key:userinfo + ${saleDetail.user_id}")
            println(s"userStr1:${userStr}")
            if (userStr != null) {
              val userinfo: UserInfo = JSON.parseObject(userStr, classOf[UserInfo])
              saleDetail.mergeUserInfo(userinfo)
            } else {
              throw new RuntimeException("当前用户信息不存在redis，请刷新用户缓存数据")
            }
            enriched += saleDetail
          })
          enriched.iterator
        } finally {
          // Always return the connection, even if a lookup fails.
          jedis.close()
        }
      })
      // Both print() and foreachRDD are actions; cache so the join/Redis
      // enrichment pipeline is not recomputed twice per batch.
      ds3.cache()
      ds3.print()
      // Write to a daily ES index; use order_detail_id as the document id
      // so replayed batches upsert instead of duplicating.
      import org.elasticsearch.spark._
      ds3.foreachRDD(rdd => {
        rdd.saveToEs("gmall2022_sale_detail_" + LocalDate.now() + "/_doc", Map("es.mapping_id" -> "order_detail_id"))
        // Commit offsets for both input streams only after the ES write,
        // giving at-least-once delivery.
        dsOrderInfo.asInstanceOf[CanCommitOffsets].commitAsync(rangesInfo)
        dsOrderDetail.asInstanceOf[CanCommitOffsets].commitAsync(rangesDetail)
      })

    })
  }
}
