package com.atguigu.dw.gmall.realtime.app

import java.util
import java.util.Properties

import com.alibaba.fastjson.JSON
import com.atguigu.constan.ConstanVal
import com.atguigu.dw.gmall.realtime.bean.{OrderDetail, OrderInfo, SaleDetail, UserInfo}
import com.atguigu.dw.gmall.realtime.util.{EsUtil, MyKafkaUtil, RedisUtil}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream
import org.json4s.DefaultFormats
import org.json4s.jackson.Serialization
import redis.clients.jedis.Jedis

import scala.collection.mutable

/**
 * description ：订单销售 app
 * author      ：剧情再美终是戏 
 * mail        : 13286520398@163.com
 * date        ：Created in 2020/2/18 21:29
 * modified By ：
 * version:    : 1.0
 */
object SaleApp extends App {

  // JDBC credentials for the MySQL user-info lookup table.
  private val properties = new Properties()
  properties.setProperty("user", ConstanVal.MYSQL_CLIENT_USER)
  properties.setProperty("password", ConstanVal.MYSQL_CLIENT_PASSWORD)

  /**
   * Joins the order stream with the order-detail stream (buffering unmatched
   * records in Redis so late partners from later micro-batches can still
   * join), enriches the joined records with user info read from MySQL, and
   * writes the resulting [[SaleDetail]] records to Elasticsearch.
   *
   * @param ssc the streaming context to attach the Kafka source streams to
   */
  override def handle(ssc: StreamingContext) = {
    import scala.collection.JavaConverters._
    import scala.util.control.NonFatal

    // Read the two source topics from Kafka.
    val orderSourceStream = MyKafkaUtil.getKafkaStream(ssc, Set[String](ConstanVal.KAFKA_TOPIC_ORDER))
    val orderDetailSourceStream = MyKafkaUtil.getKafkaStream(ssc, Set[String](ConstanVal.KAFKA_TOPIC_ORDER_DETAIL))

    // Parse the JSON values into case classes and key both streams by the
    // order id so the two streams can be joined.
    val orderInfoStream = orderSourceStream.map {
      case (_, orderJsonString) =>
        val orderInfo = JSON.parseObject(orderJsonString, classOf[OrderInfo])
        (orderInfo.id, orderInfo)
    }
    val orderDetailStream = orderDetailSourceStream.map {
      case (_, orderDetailJsonString) =>
        val orderDetail = JSON.parseObject(orderDetailJsonString, classOf[OrderDetail])
        (orderDetail.order_id, orderDetail)
    }

    // Full outer join: within one batch either side may arrive without its
    // partner, so both sides are Options.
    val joinedStream: DStream[(String, (Option[OrderInfo], Option[OrderDetail]))] =
      orderInfoStream.fullOuterJoin(orderDetailStream)

    // Case 1) order info present (detail may or may not be present)
    // Case 2) only the order detail is present
    // Case 3) neither present -- a full outer join never produces this
    val saleOrderStream: DStream[SaleDetail] = joinedStream.mapPartitions(it => {
      val client = RedisUtil.getJedisClientSingle // one Redis connection per partition

      val result = it.flatMap {
        case (_, (Some(orderInfo), detailOpt)) => // case 1
          // 1.1) Cache the order info so details arriving in later batches
          //      can still be joined against it.
          cacheOrderInfo(client, orderInfo)

          // 1.2) Pick up any details for this order that arrived earlier and
          //      were buffered in Redis, and join them now.
          val bufferedKeys: util.Set[String] = client.keys(s"order:detail:${orderInfo.id}:*")
          val fromRedis: mutable.Set[SaleDetail] = bufferedKeys.asScala.flatMap { key =>
            try {
              val detail = JSON.parseObject(client.get(key), classOf[OrderDetail])
              if (detail != null) {
                client.del(key) // consumed: drop the buffered record
                Some(SaleDetail().mergeOrderInfo(orderInfo).mergeOrderDetail(detail))
              } else {
                None
              }
            } catch {
              case NonFatal(_) => None // unparsable buffered record: skip it
            }
          }

          // 1.3) Join with the detail that arrived in this batch, if any.
          detailOpt.foreach { detail =>
            fromRedis += SaleDetail().mergeOrderInfo(orderInfo).mergeOrderDetail(detail)
          }
          fromRedis

        case (_, (None, Some(orderDetail))) => // case 2
          // 2.1) If the matching order info is already cached in Redis, join
          //      with it; otherwise buffer this detail for a later batch.
          val key = s"order:info:${orderDetail.order_id}"
          if (client.exists(key)) {
            try {
              val orderInfo = JSON.parseObject(client.get(key), classOf[OrderInfo])
              SaleDetail().mergeOrderInfo(orderInfo).mergeOrderDetail(orderDetail) :: Nil
            } catch {
              case NonFatal(_) => Nil // unparsable cached order info: skip
            }
          } else {
            cacheOrderDetail(client, orderDetail)
            Nil
          }

        case (_, (None, None)) => Nil // case 3: never produced by fullOuterJoin
      }.toList // force evaluation NOW: Iterator.flatMap is lazy and the client is closed below

      client.close() // safe: all Redis work above has been materialized
      result.iterator
    })

    // Enrich each SaleDetail with the user record read from MySQL.
    val spark = SparkSession.builder().config(ssc.sparkContext.getConf).getOrCreate()
    import spark.implicits._
    val saleOrderUserStream: DStream[SaleDetail] = saleOrderStream.transform(rdd => {

      // Load the user table, keyed by user id.
      val userInfos: RDD[(String, UserInfo)] = spark
        .read
        .jdbc(ConstanVal.MYSQL_CLIENT_URL, ConstanVal.MYSQL_TABLE_USER_INFO, properties)
        .as[UserInfo]
        .rdd
        .map(userInfo => (userInfo.id, userInfo))

      // Key the sale details by user id.
      val saleInfos: RDD[(String, SaleDetail)] = rdd.map(sale => (sale.user_id, sale))

      // Inner join: sales whose user is missing from the user table are dropped.
      saleInfos
        .join(userInfos)
        .map {
          case (_, (saleDetail, userInfo)) => saleDetail.mergeUserInfo(userInfo)
        }
    })

    // Bulk-insert each micro-batch into Elasticsearch.
    // NOTE(review): rdd.collect pulls the whole batch onto the driver; fine
    // for small batches, reconsider for large ones.
    saleOrderUserStream.foreachRDD { rdd =>
      EsUtil.insertBulk(ConstanVal.ES_INDEX_GMALL0830_SALE_DETAIL, rdd.collect)
    }
    saleOrderUserStream.print(10000)
  }

  /**
   * Buffers an order detail in Redis (10-minute TTL) until its order info
   * arrives. Key layout: order:detail:&lt;orderId&gt;:&lt;detailId&gt;
   */
  private def cacheOrderDetail(client: Jedis, orderDetail: OrderDetail) = {
    client.setex(s"order:detail:${orderDetail.order_id}:${orderDetail.id}", 60 * 10, Serialization.write(orderDetail)(DefaultFormats))
  }

  /**
   * Buffers an order info in Redis (10-minute TTL) so details arriving in
   * later batches can join against it. Key layout: order:info:&lt;orderId&gt;
   */
  private def cacheOrderInfo(client: Jedis, orderInfo: OrderInfo) = {
    client.setex(s"order:info:${orderInfo.id}", 60 * 10, Serialization.write(orderInfo)(DefaultFormats))
  }

}

/*
 Redis key layout
 orderInfo
    key   : order:info:<orderId>
    value : serialized OrderInfo (JSON)
 orderDetail
    key   : order:detail:<orderId>:<orderDetail.id>
    value : serialized OrderDetail (JSON)
 */
