import com.cw.realtime.common.base.FlinkRunner.{RunnerConfig, run}
import com.cw.realtime.common.bean.TradeProvinceOrderBean
import com.cw.realtime.common.constant.Constant.{DWS_TRADE_PROVINCE_ORDER_WINDOW, TOPIC_DWD_TRADE_ORDER_DETAIL}
import com.cw.realtime.common.util.{Cache4Dim, FlinkSinkUtil}
import io.circe.syntax._
import io.circe.{JsonObject, parser}
import io.circe.generic.auto._
import org.apache.flink.api.common.eventtime.{SerializableTimestampAssigner, WatermarkStrategy}
import org.apache.flink.api.common.functions.RichMapFunction
import org.apache.flink.api.common.state.{StateTtlConfig, ValueState, ValueStateDescriptor}
import org.apache.flink.api.common.time.Time
import org.apache.flink.api.scala.createTypeInformation
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.KeyedProcessFunction
import org.apache.flink.streaming.api.scala.DataStream
import org.apache.flink.streaming.api.scala.function.WindowFunction
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector

import java.time.Duration
import scala.collection.mutable
import scala.util.Try

object DwsTradeProvinceOrder {

  def main(args: Array[String]): Unit = {

    // Job config: app/ckp name, source topic, web-ui port, checkpointing enabled, parallelism 1.
    implicit val conf =
      RunnerConfig("dws_trade_province_order_window", TOPIC_DWD_TRADE_ORDER_DETAIL, 10028, ckp = true, parallelism = 1)

    run { (_, rawStream) =>
      // Pipeline: parse/filter -> watermark -> retraction fix -> window aggregate -> dim join -> Doris.
      val parsed      = etl(rawStream)
      val watermarked = withWatermark(parsed)
      // Compensate for extra order amounts introduced by the upstream retract stream.
      val fixed       = fixRetraction(watermarked)
      val aggregated  = windowAggregate(fixed)

      dimAssociation(aggregated)
        .map(_.asJson.noSpaces)
        .sinkTo(FlinkSinkUtil.getDorisSink(DWS_TRADE_PROVINCE_ORDER_WINDOW))
    }
  }

  /**
   * Enriches each aggregated bean with its province name, looked up from the
   * `dim_base_province` dimension via a per-task [[Cache4Dim]].
   * The cache is opened/closed with the operator lifecycle; if it failed to open,
   * beans pass through unenriched.
   */
  def dimAssociation(ds: DataStream[TradeProvinceOrderBean]) = {
    ds.map(new RichMapFunction[TradeProvinceOrderBean, TradeProvinceOrderBean] {
      var cache = Option.empty[Cache4Dim]

      override def open(parameters: Configuration): Unit = cache = Some(new Cache4Dim)

      override def close(): Unit = cache.foreach(_.close())

      override def map(value: TradeProvinceOrderBean): TradeProvinceOrderBean =
        cache.fold(value) { dimCache =>
          // NOTE(review): assumes the dim row always exposes a "name" column — confirm Cache4Dim.get contract.
          val row = dimCache.get("dim_base_province", value.province_id)
          value.transProvinceName(row("name"))
        }
    })
  }


  /**
   * Keys the stream by province id, opens 10-second tumbling event-time windows,
   * reduces beans with their `+` operator, and stamps each result with the
   * window's start/end/date via `transWindowRange`.
   */
  def windowAggregate(ds: DataStream[TradeProvinceOrderBean]) = {
    import com.cw.realtime.common.util.TimeWindowUtil._
    import org.apache.flink.streaming.api.windowing.time.{Time => WindowTime}

    ds.keyBy(_.province_id)
      .window(TumblingEventTimeWindows.of(WindowTime.seconds(10)))
      .reduce(
        _ + _,
        new WindowFunction[TradeProvinceOrderBean, TradeProvinceOrderBean, String, TimeWindow] {
          override def apply(key: String, window: TimeWindow, input: Iterable[TradeProvinceOrderBean], out: Collector[TradeProvinceOrderBean]): Unit = {
            val (stt, edt, cur_date) = window.windowRange
            input.foreach(bean => out.collect(bean.transWindowRange(stt, edt, cur_date)))
          }
        }
      )
  }


  /**
   * Converts order-detail JSON into per-detail [[TradeProvinceOrderBean]]s while compensating
   * for upstream retraction (update) records: per detail id we keep the amount last emitted
   * and emit only the delta, so repeated versions of the same detail don't double-count.
   *
   * State: one `last_order_amount` value per detail id with a 3s TTL (matching the
   * out-of-orderness bound used for watermarks).
   *
   * Fix vs. original: field extraction used `.get` on `Option` (`value("ts").get.asNumber.get`,
   * `value("id").get.asString`, …). etl only checks field *presence*, not type, so a
   * wrongly-typed field would throw NoSuchElementException and fail the job. All lookups are
   * now total; malformed records are silently skipped, well-formed ones behave identically.
   */
  def fixRetraction(ds: DataStream[JsonObject]) = {
    // Malformed records (missing/non-string id) collapse into the "" key and are
    // dropped by the safe extraction below instead of throwing in the key selector.
    ds.keyBy(_("id").flatMap(_.asString).getOrElse(""))
      .process(new KeyedProcessFunction[String, JsonObject, TradeProvinceOrderBean] {

        type FuncT = KeyedProcessFunction[String, JsonObject, TradeProvinceOrderBean]

        val desc = new ValueStateDescriptor("last_order_amount", classOf[BigDecimal])
        var state = Option.empty[ValueState[BigDecimal]]

        override def open(parameters: Configuration): Unit = {
          // TTL keeps per-detail state from accumulating forever.
          desc.enableTimeToLive(StateTtlConfig.newBuilder(Time.seconds(3)).build)
          state = Try(getRuntimeContext.getState(desc)).toOption
        }

        override def processElement(value: JsonObject, ctx: FuncT#Context, out: Collector[TradeProvinceOrderBean]): Unit =
          for {
            lastAmountState <- state
            lastAmount = Option(lastAmountState.value).getOrElse(BigDecimal(0))
            // Total extraction: a missing or wrongly-typed field skips the record.
            ts          <- value("ts").flatMap(_.asNumber).flatMap(_.toLong)
            id          <- value("id").flatMap(_.asString)
            orderId     <- value("order_id").flatMap(_.asString)
            provinceId  <- value("province_id").flatMap(_.asString)
            orderAmount <- value("split_total_amount").flatMap(_.asString)
          } {
            // Emit only the delta relative to the previously-seen amount for this detail.
            out.collect(
              TradeProvinceOrderBean(provinceId, id, mutable.Set(orderId), 1, BigDecimal(orderAmount) - lastAmount, ts)
            )
            lastAmountState.update(BigDecimal(orderAmount))
          }
      })
  }


  /**
   * Assigns event timestamps from the "ts" field (already normalised to milliseconds
   * by [[etl]]) with 3-second bounded-out-of-orderness watermarks.
   */
  def withWatermark(ds: DataStream[JsonObject]) = {
    val assigner = new SerializableTimestampAssigner[JsonObject] {
      override def extractTimestamp(element: JsonObject, recordTimestamp: Long): Long = {
        // etl guarantees "ts" is present and numeric for records reaching this operator;
        // a violation of that invariant would throw here, as in the original.
        element("ts").get.asNumber.get.toLong.get
      }
    }
    val strategy = WatermarkStrategy
      .forBoundedOutOfOrderness[JsonObject](Duration.ofSeconds(3))
      .withTimestampAssigner(assigner)
    ds.assignTimestampsAndWatermarks(strategy)
  }


  /**
   * Parses raw Kafka payloads into JSON objects, dropping anything that is not a parseable
   * object or is missing the required id/order_id/province_id/ts fields, and normalises the
   * second-granularity "ts" to milliseconds.
   *
   * Fixes vs. original: the name `ts` was re-bound three times in one for-comprehension
   * (confusing shadowing, double field lookup), and a `null` payload (e.g. an upstream
   * retract/"-D" tombstone — TODO confirm upstream can emit nulls) would NPE inside
   * `parser.parse` instead of being filtered. Well-formed records behave identically.
   */
  def etl(ds: DataStream[String]) = {
    ds.flatMap { raw =>
      for {
        line    <- Option(raw)                 // guard against null payloads
        json    <- parser.parse(line).toOption // unparseable records (retract "-D" data) are dropped
        jsonObj <- json.asObject
        _       <- jsonObj("id")
        _       <- jsonObj("order_id")
        _       <- jsonObj("province_id")
        tsSeconds <- jsonObj("ts").flatMap(_.asNumber).flatMap(_.toLong)
      } yield
        // Upstream ts is in seconds; downstream expects epoch milliseconds.
        jsonObj.add("ts", (tsSeconds * 1000).asJson)
    }
  }


}
