package com.gmall.app

import com.alibaba.fastjson.JSON
import com.gmall.bean.{OrderDetail, OrderInfo, SaleDetail, UserInfo}
import com.gmall.utils.{Constants, MyESUtil, MyRedisUtil}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.DStream
import org.json4s.JsonAST.JString
import org.json4s.{CustomSerializer, Formats, JDouble}
import org.json4s.jackson.{JsonMethods, Serialization}
import redis.clients.jedis.Jedis

import scala.collection.JavaConverters._

object SaleDetailApp extends BaseAppV2 {
    override val appName: String = "SaleDetailApp"
    override val master: String = "local[*]"
    override val batchTime: Int = 3
    override val groupId: String = "SaleDetailApp"
    override val topics: Set[String] = Set(Constants.ORDER_INFO_TOPIC, Constants.ORDER_DETAIL_TOPIC)

    /**
     * Entry point: parse the two Kafka topic streams, full-join them into
     * [[SaleDetail]] records, enrich each record with user info read from MySQL,
     * and bulk-write the result to Elasticsearch.
     *
     * @param ssc            the active streaming context
     * @param topicAndStream one raw-JSON DStream per subscribed topic
     */
    override def run(ssc: StreamingContext, topicAndStream: Map[String, DStream[String]]): Unit = {
        // Parse both streams and key them by order id so they can be joined.
        val orderInfoStream: DStream[(String, OrderInfo)] = topicAndStream(Constants.ORDER_INFO_TOPIC)
          .map(jsonStr => {
              // StringToDouble tolerates numeric fields that arrive as JSON strings.
              implicit val f: Formats = org.json4s.DefaultFormats + StringToDouble
              val orderInfo = JsonMethods.parse(jsonStr).extract[OrderInfo]
              (orderInfo.id, orderInfo)
          })
        val orderDetailStream: DStream[(String, OrderDetail)] = topicAndStream(Constants.ORDER_DETAIL_TOPIC)
          .map(jsonStr => {
              implicit val f: Formats = org.json4s.DefaultFormats + StringToDouble
              val orderDetail = JsonMethods.parse(jsonStr).extract[OrderDetail]
              (orderDetail.order_id, orderDetail)
          })

        // 1. Full join of the two streams, using sliding window + Redis dedup.
        val saleDetailStream = fullJoin_1(orderInfoStream, orderDetailStream)
        saleDetailStream.print(100)

        // Alternative strategy: fullOuterJoin + Redis cache for out-of-order data.
        // val saleDetailStream = fullJoin_2(orderInfoStream, orderDetailStream)
        // saleDetailStream.print(100)

        // 2. Enrich with user info from MySQL.
        val saleDetailWithUserStream = joinUser(ssc, saleDetailStream)
        // 3. Persist the final records to Elasticsearch.
        writeToES(saleDetailWithUserStream)
    }

    /**
     * Custom json4s deserializer: accepts numeric fields encoded as JSON strings
     * (e.g. "12.5") and parses them into Double; serializes Double back to JDouble.
     */
    val StringToDouble: CustomSerializer[Double] = new CustomSerializer[Double](format => ( {
        case JString(s) => s.toDouble
    }, {
        case d: Double => JDouble(d)
    }
    ))

    /**
     * Join strategy 1: sliding window + Redis set dedup.
     *
     * Both streams are windowed (12s window, 3s slide) so records of the same
     * order that land in adjacent batches can still meet; overlapping windows
     * produce duplicates, which are filtered out through a Redis set.
     *
     * @param orderInfoStream   (orderId, OrderInfo) stream
     * @param orderDetailStream (orderId, OrderDetail) stream
     * @return deduplicated stream of merged SaleDetail records
     */
    def fullJoin_1(orderInfoStream: DStream[(String, OrderInfo)],
                   orderDetailStream: DStream[(String, OrderDetail)]): DStream[SaleDetail] = {
        // 1. Window both streams identically (durations must be multiples of the batch interval).
        val orderInfoStreamWithWindow = orderInfoStream.window(Seconds(12), Seconds(3))
        val orderDetailStreamWithWindow = orderDetailStream.window(Seconds(12), Seconds(3))
        // 2. Inner join on order id, then merge the pair into a SaleDetail.
        orderInfoStreamWithWindow
          .join(orderDetailStreamWithWindow)
          .map {
              case (_, (orderInfo, orderDetail)) =>
                  SaleDetail().mergeOrderInfo(orderInfo).mergeOrderDetail(orderDetail)
          }
          // 3. Per partition, drop records already seen, tracked in a Redis set.
          .mapPartitions(iter => {
              val client = MyRedisUtil.getJedisClient
              // Materialize eagerly: iter.filter is LAZY, so without .toList the
              // sadd calls would only run after client.close() and fail on a
              // closed connection.
              val deduped = iter.filter(saleDetail => {
                  // sadd returns 1 only for a first-time member => keep the record.
                  // NOTE(review): this key has no TTL and grows forever; for real
                  // volumes rotate the key (e.g. one key per month) or set an expiry.
                  1 == client.sadd("gmallRealtime", s"${saleDetail.order_id}:${saleDetail.order_detail_id}")
              }).toList
              client.close()
              deduped.iterator
          })
    }


    /**
     * Join strategy 2: fullOuterJoin within a single batch + Redis cache for the
     * side that has not arrived yet.
     *
     * - (info, detail)  : merge immediately; also drain any earlier-cached details.
     * - (info, none)    : cache the info; drain any earlier-cached details.
     * - (none, detail)  : look the info up in the cache; if absent, cache the detail.
     *
     * @param orderInfoStream   (orderId, OrderInfo) stream
     * @param orderDetailStream (orderId, OrderDetail) stream
     * @return stream of merged SaleDetail records
     */
    def fullJoin_2(orderInfoStream: DStream[(String, OrderInfo)],
                   orderDetailStream: DStream[(String, OrderDetail)]): DStream[SaleDetail] = {
        orderInfoStream
          .fullOuterJoin(orderDetailStream)
          .mapPartitions(it => {
              val client: Jedis = MyRedisUtil.getJedisClient
              // Materialize eagerly: flatMap on an iterator is LAZY, so without
              // .toList every Redis call would run after client.close().
              val result = it.flatMap {
                  // Both sides present in this batch.
                  case (orderId, (Some(orderInfo), Some(orderDetail))) =>
                      // Cache the info for details that may arrive in later batches.
                      cacheOrderInfo(client, orderInfo)
                      val merged = SaleDetail().mergeOrderInfo(orderInfo).mergeOrderDetail(orderDetail)
                      // Also pick up any details that arrived before this info.
                      merged :: drainCachedOrderDetails(client, orderId, orderInfo)
                  // Only the order info arrived.
                  case (orderId, (Some(orderInfo), None)) =>
                      cacheOrderInfo(client, orderInfo)
                      drainCachedOrderDetails(client, orderId, orderInfo)
                  // Only the order detail arrived.
                  case (_, (None, Some(orderDetail))) =>
                      val infoKey = "order_info:" + orderDetail.order_id
                      if (client.exists(infoKey)) {
                          // Matching info was cached by an earlier batch: join now.
                          val orderInfo = JSON.parseObject(client.get(infoKey), classOf[OrderInfo])
                          SaleDetail().mergeOrderInfo(orderInfo).mergeOrderDetail(orderDetail) :: Nil
                      } else {
                          // No info yet: cache the detail and emit nothing.
                          cacheOrderDetail(client, orderDetail)
                          Nil
                      }
              }.toList
              client.close()
              result.iterator
          })
    }

    /**
     * Pop every cached order detail for `orderId` from the Redis hash, merge each
     * with `orderInfo`, and delete the hash so the details are emitted only once.
     * Returns Nil when nothing is cached.
     */
    private def drainCachedOrderDetails(client: Jedis, orderId: String, orderInfo: OrderInfo): List[SaleDetail] = {
        val detailKey = "order_detail:" + orderId
        if (!client.exists(detailKey)) Nil
        else {
            val merged = client
              .hgetAll(detailKey)
              .asScala
              .values
              .map { jsonString =>
                  val orderDetail = JSON.parseObject(jsonString, classOf[OrderDetail])
                  SaleDetail().mergeOrderInfo(orderInfo).mergeOrderDetail(orderDetail)
              }
              .toList
            client.del(detailKey)
            merged
        }
    }


    /*
        How order_info is cached:
        key                                 value
        "order_info:" + order_id            order_info serialized as a JSON string
        e.g.  order_info:1                  {"": "", ...}

        -------------

        How order_detail could be cached: (1) one key per detail
        key                                                   value
        "order_detail:" + order_id + order_detail_id          JSON string

        val keys = keys("order_detail:" + order_id*")   // needs a KEYS scan


        How order_detail is cached here: (2) one hash per order (1-to-many)
        key                                 value (hash)
        "order_detail:" + order_id           field                  value
                                             order_detail_id        JSON string
                                             order_detail_id        JSON string
     */

    /**
     * Cache an OrderInfo in Redis as JSON under "order_info:{id}" with a
     * 30-minute TTL, so late-arriving details of the same order can still join.
     *
     * @param client    an open Jedis connection (caller owns its lifecycle)
     * @param orderInfo the order to cache
     */
    def cacheOrderInfo(client: Jedis, orderInfo: OrderInfo) = {
        implicit val f = org.json4s.DefaultFormats
        val jsonStr = Serialization.write(orderInfo)
        client.setex("order_info:" + orderInfo.id, 30 * 60, jsonStr)
    }

    /**
     * Cache an OrderDetail in Redis under the hash "order_detail:{order_id}",
     * field = detail id, value = JSON. A hash is used because one order info
     * maps to many order details.
     *
     * @param client      an open Jedis connection (caller owns its lifecycle)
     * @param orderDetail the detail to cache
     */
    def cacheOrderDetail(client: Jedis, orderDetail: OrderDetail) = {
        implicit val f = org.json4s.DefaultFormats
        val jsonStr = Serialization.write(orderDetail)
        client.hset("order_detail:" + orderDetail.order_id, orderDetail.id, jsonStr)
    }

    /**
     * Enrich each SaleDetail with its user's info.
     *
     * Per batch: collect the distinct user ids, load just those rows from MySQL
     * via Spark SQL, and join on the driver-free RDD level.
     *
     * NOTE(review): querying MySQL every batch is heavy; a production setup
     * would bulk-load users into HBase and track changes with canal.
     *
     * @param ssc              the streaming context (for the SparkSession config)
     * @param saleDetailStream stream of merged order records
     * @return the same stream with user info merged in (records whose user is
     *         not found in MySQL are dropped by the inner join)
     */
    def joinUser(ssc: StreamingContext, saleDetailStream: DStream[SaleDetail]): DStream[SaleDetail] = {
        val spark = SparkSession.builder()
          .config(ssc.sparkContext.getConf)
          .getOrCreate()
        import spark.implicits._
        // Load only the users referenced by the current batch.
        // NOTE(review): credentials are hard-coded — move to configuration.
        val readUserInfo = (ids: List[String]) => {
            spark
              .read
              .format("jdbc")
              .option("url", "jdbc:mysql://hadoop102:3306/gmall0523?useSSL=false")
              .option("user", "root")
              .option("password", "123456")
              .option("query", s"select * from user_info where id in ('${ids.mkString("','")}')")
              .load()
              .as[UserInfo]
              .rdd
              .map(userInfo => (userInfo.id, userInfo))
        }

        saleDetailStream.transform(rdd => {
            // Runs once per batch, on the driver.
            // The rdd is used twice (collect + join), so cache it.
            rdd.cache()
            // Distinct user ids of this batch.
            val ids = rdd.map(_.user_id).collect().toSet.toList
            // Skip the JDBC round-trip entirely for an empty batch.
            val userInfoRdd: RDD[(String, UserInfo)] =
                if (ids.isEmpty) ssc.sparkContext.emptyRDD[(String, UserInfo)]
                else readUserInfo(ids)

            rdd // closures below execute on the executors
              .map(saleDetail => (saleDetail.user_id, saleDetail))
              .join(userInfoRdd)
              .map {
                  case (_, (saleDetail, userInfo)) =>
                      saleDetail.mergeUserInfo(userInfo)
              }
        })
    }


    /**
     * Bulk-write the final SaleDetail records to the "gmall_sale_detail" ES index,
     * one bulk request per partition; document id = "{order_id}_{order_detail_id}"
     * so re-processing the same record overwrites instead of duplicating.
     *
     * @param saleDetailWithUserStream fully enriched records
     */
    def writeToES(saleDetailWithUserStream: DStream[SaleDetail]): Unit = {
        saleDetailWithUserStream.foreachRDD(rdd => {
            rdd.foreachPartition(sdIt => {
                MyESUtil.insertBulk("gmall_sale_detail", sdIt.map(sd => (sd.order_id + "_" + sd.order_detail_id, sd)))
            })
        })
    }


}
