package com.yanggu.bigdata.realtime.app.dwm

import cn.hutool.core.date.{DateTime, DateUtil}
import cn.hutool.core.thread.NamedThreadFactory
import cn.hutool.json.JSONUtil
import com.yanggu.bigdata.realtime.bean.{OrderDetail, OrderInfo, OrderWide}
import com.yanggu.bigdata.realtime.common.GmallConfig.KAFKA_BROKER_LIST
import com.yanggu.bigdata.realtime.utils.{DimQueryUtil, KafkaUtil}
import org.apache.flink.api.common.eventtime.{SerializableTimestampAssigner, WatermarkStrategy}
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.scala.async.{AsyncFunction, ResultFuture}
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.util.Collector

import java.util
import java.util.concurrent.{ArrayBlockingQueue, CompletableFuture, ThreadPoolExecutor, TimeUnit}

/**
 * DWM层订单明细宽表
 * 使用订单表JOIN订单明细表, 同时使用维度表来补充明细表的数据
 * 外部数据源的查询常常是流式计算的性能瓶颈
 * 因此常见的做法有旁路缓存和异步IO
 */

/**
 * 数据流向 client -> Springboot -> MySQL -> FlinkApp -> Kafka(ods_base_db) -> FlinkApp -> Kafka(dwd_事实表)、Hbase(DIM) -> FlinkApp -> Kafka(dwm_事实宽表)
 * 程 序 MockDB -> MySQL -> flink-cdc -> Kafka(ods_base_db) -> Flink(BaseDBApp) -> Kafka(dwd_事实表)、Hbase(DIM) -> Flink(OrderWideApp) -> Kafka(dwm_事实宽表)
 *                                       Kafka、zk                                 Kafka、zk、hbase、hdfs、phoenix      redis、phoenix       Kafka、zk
 */
object OrderWideNewApp {

  def main(args: Array[String]): Unit = {
    //1. Set up the execution environment
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    //2. Read the order and order-detail topics from Kafka
    //2.1 Order stream: read topic dwd_order_info, derive date/hour/ts fields, assign watermarks
    val keyedOrderInfoDataStream = KafkaUtil.getKafkaDataStream(env, KAFKA_BROKER_LIST, "dwd_order_info", "OrderWideApp")
      .map(data => {
        val orderInfo = JSONUtil.toBean(data, classOf[OrderInfo])
        val createTime = DateUtil.parseDateTime(orderInfo.getCreate_time)
        //creation date (yyyy-MM-dd)
        orderInfo.setCreate_date(DateUtil.formatDate(createTime))
        //creation time-of-day (DateUtil.formatTime yields HH:mm:ss, not just the hour —
        // NOTE(review): field is named Create_hour; confirm downstream expects the full time string)
        orderInfo.setCreate_hour(DateUtil.formatTime(createTime))
        //creation epoch-millis, used below as the event timestamp
        orderInfo.setCreate_ts(createTime.getTime)
        orderInfo
      })
      //monotonous watermarks based on the creation timestamp
      .assignTimestampsAndWatermarks(WatermarkStrategy.forMonotonousTimestamps[OrderInfo]
        .withTimestampAssigner(new SerializableTimestampAssigner[OrderInfo] {
          override def extractTimestamp(element: OrderInfo, recordTimestamp: Long): Long = element.getCreate_ts
        })
      ).setParallelism(1)
      //key by order id so both streams can be interval-joined
      .keyBy(_.getId)

    //2.2 Order-detail stream: read topic dwd_order_detail, derive ts, assign watermarks
    val keyedOrderDetailDataStream = KafkaUtil.getKafkaDataStream(env, KAFKA_BROKER_LIST, "dwd_order_detail", "OrderWideApp")
      .map(data => {
        val orderDetail = JSONUtil.toBean(data, classOf[OrderDetail])
        val createTime = DateUtil.parseDateTime(orderDetail.getCreate_time)
        //creation epoch-millis, used below as the event timestamp
        orderDetail.setCreate_ts(createTime.getTime)
        orderDetail
      })
      //monotonous watermarks based on the creation timestamp
      .assignTimestampsAndWatermarks(WatermarkStrategy.forMonotonousTimestamps[OrderDetail]
        .withTimestampAssigner(new SerializableTimestampAssigner[OrderDetail] {
          override def extractTimestamp(element: OrderDetail, recordTimestamp: Long): Long = element.getCreate_ts
        })
      ).setParallelism(1)
      //key by order id so both streams can be interval-joined
      .keyBy(_.getOrder_id)

    //Interval-join the order stream with the order-detail stream:
    //a detail may arrive up to 10s before or 5s after its order record
    val orderWideDataStream = keyedOrderInfoDataStream
      .intervalJoin(keyedOrderDetailDataStream)
      .between(Time.seconds(-10L), Time.seconds(5L))
      .process((left: OrderInfo, right: OrderDetail, _: ProcessJoinFunction[OrderInfo, OrderDetail, OrderWide]#Context, out: Collector[OrderWide]) => {
        out.collect(new OrderWide(left, right))
      }).setParallelism(1)

    //Debug output of the joined wide records
    //orderWideDataStream.print("OrderWideData>>>>>").setParallelism(1)

    //3. Enrich the wide records with dimension data via async I/O.
    //   Each dim lookup runs on a private thread pool; all lookups for one record are
    //   awaited with CompletableFuture.allOf before the record is emitted downstream.
    val orderWideWithDimInfo = AsyncDataStream.unorderedWait(orderWideDataStream, new AsyncFunction[OrderWide, OrderWide] {

      //Private thread pool for dim lookups; lazy so it is created per task instance
      //after deserialization rather than being serialized with the function.
      private lazy val executor = {
        val threadPoolExecutor = new ThreadPoolExecutor(5, 5, 100L, TimeUnit.SECONDS,
          new ArrayBlockingQueue[Runnable](100), new NamedThreadFactory("add-dim-data", false),
          new ThreadPoolExecutor.DiscardPolicy())
        //Eagerly start all core threads
        threadPoolExecutor.prestartAllCoreThreads()
        threadPoolExecutor
      }

      override def asyncInvoke(input: OrderWide, resultFuture: ResultFuture[OrderWide]): Unit = {

        val list = new util.ArrayList[CompletableFuture[Void]](6)

        //User dimension.
        //Bug fix: the original used `return` inside these lambdas; in Scala a `return`
        //in a closure is a nonlocal return (throws NonLocalReturnControl targeting
        //asyncInvoke). Since these lambdas run on executor threads after asyncInvoke
        //has returned, that throw is uncatchable by its target and fails the future
        //whenever a dim row is missing. Guard with a plain null check instead.
        val userFuture = CompletableFuture.runAsync(() => {
          val dimInfo = DimQueryUtil.getDimInfo(tableName = "DIM_USER_INFO", pkValue = input.getUser_id.toString)
          if (dimInfo != null) {
            //Approximate age: calendar-year difference, ignores whether the birthday
            //has occurred yet this year — assumed acceptable for analytics; confirm.
            val age = DateTime.now().year() - DateUtil.parseDateTime(dimInfo.getString("BIRTHDAY")).year()
            input.setUser_age(age)
            input.setUser_gender(dimInfo.getString("GENDER"))
          }
        }, executor)
        list.add(userFuture)

        //Province dimension: name, area_code, iso_code, iso_3166_2
        val provinceFuture = CompletableFuture.runAsync(() => {
          val dimInfo = DimQueryUtil.getDimInfo("DIM_BASE_PROVINCE", pkValue = input.getProvince_id.toString)
          if (dimInfo != null) {
            input.setProvince_name(dimInfo.getString("NAME"))
            input.setProvince_area_code(dimInfo.getString("AREA_CODE"))
            input.setProvince_iso_code(dimInfo.getString("ISO_CODE"))
            input.setProvince_3166_2_code(dimInfo.getString("ISO_3166_2"))
          }
        }, executor)
        list.add(provinceFuture)

        //SKU dimension; also populates spu_id/category3_id/tm_id used by the
        //dependent lookups chained off this future below.
        val skuFuture = CompletableFuture.runAsync(() => {
          val dimInfo = DimQueryUtil.getDimInfo("DIM_SKU_INFO", pkValue = input.getSku_id.toString)
          if (dimInfo != null) {
            input.setSku_name(dimInfo.getString("SKU_NAME"))
            input.setSpu_id(dimInfo.getLong("SPU_ID"))
            input.setCategory3_id(dimInfo.getLong("CATEGORY3_ID"))
            input.setTm_id(dimInfo.getLong("TM_ID"))
          }
        }, executor)
        list.add(skuFuture)

        //SPU dimension — depends on spu_id set by the SKU lookup, hence thenRunAsync
        val spuFuture = skuFuture.thenRunAsync(() => {
          if (input.getSpu_id != null) {
            val dimInfo = DimQueryUtil.getDimInfo("DIM_SPU_INFO", pkValue = input.getSpu_id.toString)
            if (dimInfo != null) {
              input.setSpu_name(dimInfo.getString("SPU_NAME"))
            }
          }
        }, executor)
        list.add(spuFuture)

        //Trademark (brand) dimension — depends on tm_id set by the SKU lookup
        val tmFuture = skuFuture.thenRunAsync(() => {
          if (input.getTm_id != null) {
            val dimInfo = DimQueryUtil.getDimInfo("DIM_BASE_TRADEMARK", pkValue = input.getTm_id.toString)
            if (dimInfo != null) {
              input.setTm_name(dimInfo.getString("TM_NAME"))
            }
          }
        }, executor)
        list.add(tmFuture)

        //Level-3 category dimension — depends on category3_id set by the SKU lookup
        val category3Future = skuFuture.thenRunAsync(() => {
          if (input.getCategory3_id != null) {
            val dimInfo = DimQueryUtil.getDimInfo("DIM_BASE_CATEGORY3", pkValue = input.getCategory3_id.toString)
            if (dimInfo != null) {
              input.setCategory3_name(dimInfo.getString("NAME"))
            }
          }
        }, executor)
        list.add(category3Future)

        val array = new Array[CompletableFuture[Void]](list.size())

        //Scala calling a Java varargs method: spread with `: _*`
        //https://blog.csdn.net/eases_stone/article/details/85096585
        //When all lookups finish, emit the enriched record (or propagate the failure).
        //Note the if/else: the original `return` here was also a broken nonlocal return.
        CompletableFuture.allOf(list.toArray(array): _*)
          .whenCompleteAsync((_, exception) => {
            if (exception != null) {
              exception.printStackTrace()
              resultFuture.completeExceptionally(exception)
            } else {
              resultFuture.complete(Iterable(input))
            }
          })
      }
    }, 6000L, TimeUnit.SECONDS, 5) //NOTE(review): 6000 SECONDS (~100 min) async timeout looks like it was meant to be milliseconds — confirm intended unit

    //4. Sink: write the wide records to the DWM Kafka topic
    orderWideWithDimInfo.print("orderWideWithDimInfo>>>>>>").setParallelism(1)
    orderWideWithDimInfo
      .map(JSONUtil.toJsonStr(_))
      .addSink(KafkaUtil.getKafkaSink(KAFKA_BROKER_LIST, "dwm_order_wide"))

    //5. Launch the job
    env.execute("OrderWideApp Job")

  }

}
