package com.shellyan.gmall.rt.app

import java.lang

import com.alibaba.fastjson.JSON
import com.atguigu.realtime.gmall.common.Constant
import com.shellyan.gmall.rt.bean.{OrderDetail, OrderInfo, SaleDetail, UserInfo}
import com.shellyan.gmall.rt.util.{ESUtil, RedisUtil}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.dstream.DStream
import org.json4s.jackson.Serialization
import redis.clients.jedis.Jedis

import scala.collection.JavaConverters._

/**
 * @author Shelly An
 * @create 2020/9/11 9:05
 *         需要两个流，因此BaseApp不能满足需求，提升一下，写一个Base2App
 */
object SaleDetail2App extends Base2App {
  override val master: String = "local[2]"
  override val appName: String = "SaleDetailApp"
  override val batchTime: Int = 3
  override val topics: Set[String] = Set(Constant.ORDER_INFO_TOPIC, Constant.ORDER_DETAIL_TOPIC)
  override val groupId: String = "SaleDetailApp"

  /**
   * Caches an OrderInfo in Redis as JSON under the key "order_info:&lt;order id&gt;".
   *
   * The entry lives for 30 minutes so that order details arriving in a later
   * batch within that window can still be joined to their order.
   *
   * @param client    Redis connection to write through
   * @param orderInfo order to cache
   * @return the SETEX status reply (e.g. "OK")
   */
  def cacheOrderInfo(client: Jedis, orderInfo: OrderInfo): String = {
    implicit val f = org.json4s.DefaultFormats
    // keep for 30 minutes
    client.setex(s"order_info:${orderInfo.id}", 60 * 30, Serialization.write(orderInfo))
  }

  /**
   * Caches an OrderDetail in a Redis hash keyed "order_detail:&lt;order id&gt;"
   * (field = detail id, value = JSON). A hash is used because one order can
   * have many details. The whole hash expires after 30 minutes.
   *
   * @param client      Redis connection to write through
   * @param orderDetail detail to cache
   * @return the EXPIRE reply (1 if the TTL was set)
   */
  def cacheOrderDetail(client: Jedis, orderDetail: OrderDetail): lang.Long = {
    implicit val f = org.json4s.DefaultFormats
    client.hset(s"order_detail:${orderDetail.order_id}", orderDetail.id, Serialization.write(orderDetail))
    client.expire(s"order_detail:${orderDetail.order_id}", 60 * 30)
  }

  /**
   * Joins the order-info stream with the order-detail stream on order id.
   *
   * Because the two streams are not batch-aligned, a full outer join is used
   * and the unmatched sides are buffered in Redis (30-minute TTL) so records
   * from different batches can still be paired:
   *  - (Some, Some): emit the pair, cache the order, and also drain any
   *    details previously cached for this order.
   *  - (None, Some): look the order up in the cache; emit if found,
   *    otherwise cache the detail and wait.
   *  - (Some, None): cache the order and drain any previously cached details.
   *
   * @return a stream of merged SaleDetail records
   */
  def joinOrderInfoOrderDetail(orderInfoStream: DStream[OrderInfo], orderDetailStream: DStream[OrderDetail]): DStream[SaleDetail] = {
    val orderIdToOrderInfoStream: DStream[(String, OrderInfo)] = orderInfoStream.map((info: OrderInfo) => (info.id, info))
    val orderIdToOrderDetailStream: DStream[(String, OrderDetail)] = orderDetailStream.map((detail: OrderDetail) => (detail.order_id, detail))

    // a full outer join is required: either side may be missing in this batch
    val value: DStream[SaleDetail] = orderIdToOrderInfoStream.fullOuterJoin(orderIdToOrderDetailStream)
      .mapPartitions {
        it: Iterator[(String, (Option[OrderInfo], Option[OrderDetail]))] => {
          // one Redis connection per partition, closed after the partition is drained
          val client: Jedis = RedisUtil.getClient

          val result: Iterator[SaleDetail] = it.flatMap {
            //some some
            case (orderId, (Some(orderInfo: OrderInfo), Some(orderDetail: OrderDetail))) =>
              println("some some")
              //1. cache the order so details in later batches can find it
              cacheOrderInfo(client, orderInfo)
              //2. merge this pair into a SaleDetail
              val saleDetail: SaleDetail = SaleDetail().mergeOrderInfo(orderInfo).mergeOrderDetail(orderDetail)
              //3. drain any details cached for this order by earlier batches
              if (client.exists(s"order_detail:${orderInfo.id}")) {
                val t: List[SaleDetail] = client
                  .hgetAll(s"order_detail:${orderInfo.id}")
                  .asScala.map {
                  case (orderDetailID, orderDetailJson) =>
                    val od: OrderDetail = JSON.parseObject(orderDetailJson, classOf[OrderDetail])
                    SaleDetail().mergeOrderInfo(orderInfo).mergeOrderDetail(od)
                }.toList :+ saleDetail
                client.del(s"order_detail:${orderInfo.id}")
                t
              } else {
                saleDetail :: Nil
              }
            //none some
            case (orderId, (None, Some(orderDetail: OrderDetail))) =>
              println("none some")
              // BUG FIX: the order cache is keyed by the ORDER id, so look it up
              // with orderDetail.order_id (== orderId), not orderDetail.id (the
              // detail's own primary key). The old code almost never matched,
              // silently dropping cross-batch pairs after the 30-minute TTL.
              if (client.exists(s"order_info:${orderDetail.order_id}")) {
                // the order was cached by an earlier batch: emit the pair now
                val json: String = client.get(s"order_info:${orderDetail.order_id}")
                val orderInfo: OrderInfo = JSON.parseObject(json, classOf[OrderInfo])
                SaleDetail().mergeOrderInfo(orderInfo).mergeOrderDetail(orderDetail) :: Nil
              } else {
                // order not seen yet: cache the detail and wait for the order
                cacheOrderDetail(client, orderDetail)
                Nil
              }


            //some none
            case (orderId, (Some(orderInfo: OrderInfo), None)) =>
              println("some none")
              //1. cache the order so details in later batches can find it
              cacheOrderInfo(client, orderInfo)
              //2. drain any details cached for this order by earlier batches
              if (client.exists(s"order_detail:${orderInfo.id}")) {
                val t: List[SaleDetail] = client
                  .hgetAll(s"order_detail:${orderInfo.id}")
                  .asScala.map {
                  case (orderDetailID, orderDetailJson) =>
                    val od: OrderDetail = JSON.parseObject(orderDetailJson, classOf[OrderDetail])
                    SaleDetail().mergeOrderInfo(orderInfo).mergeOrderDetail(od)
                }.toList
                client.del(s"order_detail:${orderInfo.id}")
                t
              } else {
                Nil
              }
          }
          // NOTE: flatMap over the partition iterator is lazy, so the cases above
          // run as `result` is consumed downstream; close() here relies on the
          // partition being fully drained before the connection is needed again.
          client.close()

          result
        }
      }
    value
  }

  /**
   * Enriches each SaleDetail with its user dimension row, read per batch
   * from MySQL via Spark SQL.
   *
   * Querying the business database directly is not ideal; better options:
   *  1. Mirror the dimension table into HBase via canal and look it up there.
   *  2. Use maxwell's bootstrap (init) feature to capture the existing rows.
   *
   * NOTE(review): the join is an inner join, so sale details whose user id is
   * absent from user_info are dropped — confirm this is intended.
   */
  def joinUser(saleDetail: DStream[SaleDetail]): DStream[SaleDetail] = {
    // Reuse the streaming context's Spark configuration for SQL access.
    val spark: SparkSession = SparkSession.builder()
      .config(ssc.sparkContext.getConf)
      .getOrCreate()

    import spark.implicits._

    // Reads the user_info rows whose ids appear in the (pre-quoted,
    // comma-separated) `ids` string, keyed by user id.
    def readUserInfoes(ids: String): RDD[(String, UserInfo)] = {

      spark.read
        .format("jdbc")
        .option("url", "jdbc:mysql://hadoop102:3306/gmall0421?useSSL=false")
        .option("user", "root")
        .option("password", "123456")
        .option("query", s"select * from user_info where id in (${ids})")
        .load()
        .as[UserInfo]
        .rdd
        .map((userinfo: UserInfo) => (userinfo.id, userinfo))

    }

    saleDetail.transform((rdd: RDD[SaleDetail]) => {
      // The RDD is consumed twice (collect + join); cache it so the second
      // pass does not recompute the lineage.
      rdd.cache()

      // Build a quoted id list: prefix "'", separator "','", suffix "'"
      val ids: String = rdd.map(_.user_id).collect().mkString("'", "','", "'")
      // The user dimension is re-read on every batch.
      val userInfoRDD: RDD[(String, UserInfo)] = readUserInfoes(ids)

      rdd.map((saleDetail: SaleDetail) => (saleDetail.user_id, saleDetail))
        .join(userInfoRDD)
        .map {
          case (_, (saleDetail, userInfo)) =>
            saleDetail.mergeUserInfo(userInfo)
        }
    })
  }

  /**
   * Writes the enriched sale details to Elasticsearch in per-partition bulk
   * requests, using order_detail_id as the document id (idempotent upserts).
   */
  def write2ES(saleDetailWithUser: DStream[SaleDetail]): Unit = {
    saleDetailWithUser.foreachRDD(rdd => {
      rdd.foreachPartition(it => {
        ESUtil.insertBulk(
          Constant.INDEX_SALE_DETAIL,
          it.map(sale => (sale.order_detail_id, sale)))
      })
    })
  }

  /**
   * Pipeline entry point: parse both topic streams, join them with
   * Redis-buffered full outer join, enrich with user data, write to ES.
   */
  override def run(streams: Map[String, DStream[String]]): Unit = {
    val orderInfoStream: DStream[OrderInfo] = streams(Constant.ORDER_INFO_TOPIC)
      .map((json: String) => JSON.parseObject(json, classOf[OrderInfo]))
    val orderDetailStream: DStream[OrderDetail] = streams(Constant.ORDER_DETAIL_TOPIC)
      .map((json: String) => JSON.parseObject(json, classOf[OrderDetail]))
    // both streams feed the join, which evaluates them more than once
    orderInfoStream.cache()
    orderDetailStream.cache()
    //1. full outer join of the two streams on order id
    val saleDetail: DStream[SaleDetail] = joinOrderInfoOrderDetail(orderInfoStream, orderDetailStream)
    //    saleDetail.print()
    //2. join user dimension
    val saleDetailWithUser: DStream[SaleDetail] = joinUser(saleDetail)
    //3. write to ES
    write2ES(saleDetailWithUser)
  }
}
