package com.atguigu.gmall.realtime.app

import java.time.{LocalDate, Period}
import java.util
import com.alibaba.fastjson.{JSON, JSONObject}
import com.alibaba.fastjson.serializer.SerializeConfig
import com.atguigu.gmall.realtime.bean.{OrderDetail, OrderInfo, OrderWide}
import com.atguigu.gmall.realtime.util.{MyEsUtil, MyKafkaUtil, MyPropertiesUtil, OffsetManager, RedisUtil}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.{HashPartitioner, Partitioner, SparkConf}
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import redis.clients.jedis.Jedis

import scala.collection.mutable.ListBuffer

object DwOrderWideApp {

  /**
   * Order-wide ETL streaming job:
   *  0 build the streaming environment
   *  1 load saved offsets and consume Kafka — fact table (order_info) + child table (order_detail)
   *  2 capture this micro-batch's offset ranges for both topics (on the driver)
   *  3 parse records into OrderInfo / OrderDetail and enrich dimensions (province, user)
   *  4 stream-to-stream join: full outer join plus a Redis cache so rows whose
   *    counterpart arrives in a different micro-batch can still be matched
   *  5 bulk-write the joined wide rows to Elasticsearch
   *  6 save offsets only after the write succeeds (at-least-once + idempotent sink
   *    keyed by detail_id = effectively exactly-once)
   */
  def main(args: Array[String]): Unit = {
    //0 environment
    val orderInfoTopic = "DWD_ORDER_INFO_I"
    val orderDetailTopic = "DWD_ORDER_DETAIL_I"
    val groupId = "dw_order_wide_app"

    val sparkConf: SparkConf = new SparkConf().setAppName("dw_order_wide_app") //.setMaster("local[4]")
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    //1 consume Kafka starting from the saved offsets; if none were saved,
    //  fall back to the consumer's default position
    //1.1 fact table: order_info
    val orderInfoOffsetMap: Map[TopicPartition, Long] = OffsetManager.getOffset(orderInfoTopic, groupId)
    val orderInfoInputDstream: InputDStream[ConsumerRecord[String, String]] =
      if (orderInfoOffsetMap == null || orderInfoOffsetMap.isEmpty) {
        MyKafkaUtil.getKafkaStream(orderInfoTopic, ssc, groupId)
      } else {
        MyKafkaUtil.getKafkaStream(orderInfoTopic, ssc, orderInfoOffsetMap, groupId)
      }

    //1.2 child table: order_detail
    val orderDetailOffsetMap: Map[TopicPartition, Long] = OffsetManager.getOffset(orderDetailTopic, groupId)
    val orderDetailInputDstream: InputDStream[ConsumerRecord[String, String]] =
      if (orderDetailOffsetMap == null || orderDetailOffsetMap.isEmpty) {
        MyKafkaUtil.getKafkaStream(orderDetailTopic, ssc, groupId)
      } else {
        MyKafkaUtil.getKafkaStream(orderDetailTopic, ssc, orderDetailOffsetMap, groupId)
      }

    //2 capture offset ranges of the current batch (runs on the driver inside transform)
    //2.1 fact table
    var orderInfoOffsetRanges: Array[OffsetRange] = null //driver
    val orderInfoInputDstreamWithOffset: DStream[ConsumerRecord[String, String]] =
      orderInfoInputDstream.transform { rdd =>
        orderInfoOffsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd
      }
    //2.2 child table
    var orderDetailOffsetRanges: Array[OffsetRange] = null //driver
    val orderDetailInputDstreamWithOffset: DStream[ConsumerRecord[String, String]] =
      orderDetailInputDstream.transform { rdd =>
        orderDetailOffsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd
      }

    //3 convert to structured objects OrderInfo / OrderDetail
    //3.1 order_info: parse + enrich dimensions.
    //    mapPartitions so ONE Redis connection is opened per partition instead of per record.
    val orderInfoDstream: DStream[OrderInfo] = orderInfoInputDstreamWithOffset.mapPartitions { recordItr =>
      val jedis: Jedis = RedisUtil.getJedisClient
      val orderInfoList: List[OrderInfo] = recordItr.map { record =>
        val orderInfo: OrderInfo = JSON.parseObject(record.value(), classOf[OrderInfo])

        // dimension: province (guard against a cache miss returning null)
        val provinceKey = "DIM:base_province:" + orderInfo.province_id
        val provinceJson: String = jedis.get(provinceKey)
        if (provinceJson != null) {
          val provinceJsonObj: JSONObject = JSON.parseObject(provinceJson)
          orderInfo.province_name = provinceJsonObj.getString("name")
          orderInfo.province_area_code = provinceJsonObj.getString("area_code") // ali datav  quickbi baidu suger
          orderInfo.province_3166_2_code = provinceJsonObj.getString("iso_3166_2") // kibana
          orderInfo.province_iso_code = provinceJsonObj.getString("iso_code") // superset
        }

        // dimension: user basics — gender + age derived from birthday
        val userInfoKey = "DIM:user_info:" + orderInfo.user_id
        val userInfoJson: String = jedis.get(userInfoKey)
        if (userInfoJson != null) {
          val userInfoJsonObj: JSONObject = JSON.parseObject(userInfoJson)
          orderInfo.user_gender = userInfoJsonObj.getString("gender")
          val birthday = userInfoJsonObj.getString("birthday")
          val birthdayDate: LocalDate = LocalDate.parse(birthday)
          orderInfo.user_age = Period.between(birthdayDate, LocalDate.now()).getYears
        }

        // derive create_date / create_hour from create_time ("yyyy-MM-dd HH:mm:ss")
        val dateTimeArr: Array[String] = orderInfo.create_time.split(" ")
        orderInfo.create_date = dateTimeArr(0)
        orderInfo.create_hour = dateTimeArr(1).split(":")(0)
        orderInfo
      }.toList // materialize BEFORE closing the connection
      jedis.close()
      orderInfoList.iterator
    }
    //orderInfoDstream.print(10000)

    //3.2 order_detail: parse only (brand/category dimensions still TODO)
    val orderDetailDstream: DStream[OrderDetail] = orderDetailInputDstreamWithOffset.map { record =>
      JSON.parseObject(record.value(), classOf[OrderDetail])
    }
    //orderDetailDstream.print(10000)

    //4 stream join
    //4.1 full outer join keyed by order id, so unmatched rows from either side survive
    val orderInfoWithkeyDstream: DStream[(Long, OrderInfo)] =
      orderInfoDstream.map(orderInfo => (orderInfo.id, orderInfo))
    val orderDetailWithkeyDstream: DStream[(Long, OrderDetail)] =
      orderDetailDstream.map(orderDetail => (orderDetail.order_id, orderDetail))

    val orderJoinedDstream: DStream[(Long, (Option[OrderInfo], Option[OrderDetail]))] =
      orderInfoWithkeyDstream.fullOuterJoin(orderDetailWithkeyDstream)

    //4.2 resolve the join across micro-batch boundaries via a 10-minute Redis cache
    //4.2.1 fact row present:
    //  1) details in the same batch  -> merge immediately
    //  2) always cache the fact row  (set, key ORDER_JOIN:ORDER_INFO:<id>, TTL 10 min)
    //  3) pick up details already cached by earlier batches -> merge
    //4.2.2 only the child row present:
    //  1) cache it (sadd, key ORDER_JOIN:ORDER_DETAIL:<order_id>, TTL 10 min)
    //  2) if the fact row is already cached -> merge; otherwise wait for a later batch
    val orderWideDstream: DStream[OrderWide] = orderJoinedDstream.mapPartitions { joinedItr =>
      // one Redis connection per partition
      val jedis: Jedis = RedisUtil.getJedisClient
      val orderWideList = new ListBuffer[OrderWide]()

      joinedItr.foreach {
        case (orderId, (Some(orderInfo), orderDetailOpt)) =>
          //4.2.1 fact row present
          //1) child row in the same batch
          for (orderDetail <- orderDetailOpt) {
            orderWideList.append(new OrderWide(orderInfo, orderDetail))
          }
          //2) cache the fact row: string 1:1, key ORDER_JOIN:ORDER_INFO:<id>, TTL 10 min
          val orderInfoKey = s"ORDER_JOIN:ORDER_INFO:${orderInfo.id}"
          val orderInfoJson = JSON.toJSONString(orderInfo, new SerializeConfig(true))
          jedis.setex(orderInfoKey, 10 * 60, orderInfoJson)
          //3) pick up child rows cached by earlier batches
          val orderDetailKey = s"ORDER_JOIN:ORDER_DETAIL:${orderInfo.id}"
          val orderDetailJsonSet: util.Set[String] = jedis.smembers(orderDetailKey)
          if (orderDetailJsonSet != null && orderDetailJsonSet.size() > 0) {
            import scala.collection.JavaConverters._
            for (orderDetailJson <- orderDetailJsonSet.asScala) {
              val orderDetail: OrderDetail = JSON.parseObject(orderDetailJson, classOf[OrderDetail])
              orderWideList.append(new OrderWide(orderInfo, orderDetail))
            }
          }

        case (orderId, (None, Some(orderDetail))) =>
          //4.2.2 only the child row present
          //1) cache it: set 1:n, key ORDER_JOIN:ORDER_DETAIL:<order_id>, TTL 10 min
          val orderDetailKey = s"ORDER_JOIN:ORDER_DETAIL:${orderDetail.order_id}"
          val orderDetailJson = JSON.toJSONString(orderDetail, new SerializeConfig(true))
          jedis.sadd(orderDetailKey, orderDetailJson)
          jedis.expire(orderDetailKey, 10 * 60)
          //2) merge ONLY when the fact row is already cached; a cache miss used to
          //   produce OrderWide(null, detail) — now the row simply waits in the cache
          val orderInfoKey = s"ORDER_JOIN:ORDER_INFO:${orderDetail.order_id}"
          val orderInfoJson: String = jedis.get(orderInfoKey)
          if (orderInfoJson != null) {
            val orderInfo: OrderInfo = JSON.parseObject(orderInfoJson, classOf[OrderInfo])
            orderWideList.append(new OrderWide(orderInfo, orderDetail))
          }

        case _ => // fullOuterJoin never emits (None, None); nothing to do
      }
      jedis.close()
      orderWideList.iterator
    }

    // cache: the stream is consumed by both print() and the ES writer
    orderWideDstream.cache()
    orderWideDstream.print(1000)

    //5/6 bulk-write to ES (idempotent: document id = detail_id), then save offsets
    //    on the driver AFTER the batch's output succeeded.
    //    NOTE: a second, copy-pasted ES writer (saveToEs into gmall2022_dau_info_* with
    //    es.mapping.id=mid, plus commitAsync on a TRANSFORMED DStream — an invalid
    //    CanCommitOffsets cast that throws ClassCastException) has been removed:
    //    it duplicated this write and double-committed offsets.
    orderWideDstream.foreachRDD { rdd =>
      rdd.foreachPartition { orderWideItr =>
        val orderWideList: List[OrderWide] = orderWideItr.toList
        if (orderWideList != null && orderWideList.nonEmpty) {
          // one index per day, derived from the batch's first row
          val indexName = s"gmall0722_order_wide_${orderWideList.head.create_date}"
          val orderWideWithIdList: List[(String, OrderWide)] =
            orderWideList.map(orderWide => (orderWide.detail_id.toString, orderWide))
          MyEsUtil.saveBulk(orderWideWithIdList, indexName)
        }
      }
      // driver-side, post-commit: offsets are persisted only after the write above
      OffsetManager.saveOffset(orderInfoTopic, groupId, orderInfoOffsetRanges)
      OffsetManager.saveOffset(orderDetailTopic, groupId, orderDetailOffsetRanges)
    }

    ssc.start()
    ssc.awaitTermination()
  }

}
