package com.cw.realtime.dim
package app

import function.{BcProcessFunction, FlatMapFunction, HbaseSinkFunction}

import com.cw.realtime.common.base.FlinkRunner._
import com.cw.realtime.common.bean._
import com.cw.realtime.common.constant.Constant._
import com.cw.realtime.common.util.FlinkSourceUtil._
import io.circe._
import io.circe.optics.JsonPath._
import io.circe.parser._
import org.apache.flink.api.common.eventtime.WatermarkStrategy
import org.apache.flink.api.common.state.MapStateDescriptor
import org.apache.flink.api.scala.createTypeInformation
import org.apache.flink.streaming.api.datastream.BroadcastStream
import org.apache.flink.streaming.api.scala.DataStream

object DimApp {

  /**
   * Entry point: builds and runs the DIM-layer Flink job.
   *
   * Pipeline:
   *   1. Parse/filter the Kafka ODS stream (`topic_db`) into JSON records.
   *   2. Read dimension-table configuration via MySQL CDC, creating the
   *      corresponding HBase tables on the fly (existing tables are kept).
   *   3. Broadcast the configuration and connect it with the data stream.
   *   4. Project each record's "data" payload down to the configured sink columns.
   *   5. Write the result to HBase.
   */
  def main(args: Array[String]): Unit = {
    implicit val conf: RunnerConfig = RunnerConfig("dim_app", "topic_db", 10001)
    run { (env, ds) =>
      // Filter raw ODS records from Kafka; each restart consumes from the
      // earliest offset (per the original job's intent — see RunnerConfig).
      val filtered = etl(ds)

      // Dimension-table configuration captured via MySQL CDC.
      val mysqlSource = getMysqlSource(DATABASE_PROCESS, TABLE_PROCESS_DIM)
      val dimConfig = env.fromSource(mysqlSource, WatermarkStrategy.noWatermarks(), "mysql_source")
        .setParallelism(1) // single reader keeps CDC change order deterministic

      // Convert the config String stream into a bean stream; as a side effect
      // the corresponding HBase tables are created (existing ones are ignored).
      val tblProcessStream = createHbaseTable(dimConfig)

      // Broadcast the configuration so every data subtask sees every config row.
      val bcDesc = new MapStateDescriptor("dim_state", classOf[String], classOf[TableProcessDim])
      val bcStream = tblProcessStream.broadcast(bcDesc)

      // Join each data record with its matching table configuration.
      val dimStream = connectStream(filtered, bcStream, bcDesc)

      // Keep only the fields the configuration says should be written.
      val extractedStream = extractJsonFields(dimStream)
      extractedStream.print()

      // Persist to HBase.
      extractedStream.addSink(new HbaseSinkFunction)
    }
  }

  /**
   * Connects the parsed data stream with the broadcast configuration stream.
   *
   * @param dataStream      parsed ODS JSON records
   * @param broadcastStream broadcast dimension-table configuration
   * @param stateDesc       descriptor of the broadcast map state
   *                        (presumably keyed by source table name — confirm in BcProcessFunction)
   * @return pairs of (record, matching table configuration)
   */
  def connectStream(
    dataStream: DataStream[Json],
    broadcastStream: BroadcastStream[TableProcessDim],
    stateDesc: MapStateDescriptor[String, TableProcessDim]
  ): DataStream[(Json, TableProcessDim)] = {
    val connected = dataStream.connect(broadcastStream)
    // NOTE(review): parallelism forced to 1 — looks intended to simplify the
    // broadcast-state matching; confirm before scaling this operator out.
    connected.process(new BcProcessFunction(stateDesc)).setParallelism(1)
  }

  /**
   * Parses each raw ODS string into JSON and keeps only records whose
   * `database` is "gmall" and whose `type` is neither "bootstrap-start" nor
   * "bootstrap-complete" (snapshot markers — presumably Maxwell's — that carry
   * no usable row data). Unparseable records are logged to stdout and dropped.
   */
  def etl(ds: DataStream[String]): DataStream[Json] = {
    ds.flatMap { str =>
      parse(str) match {
        case Left(error) =>
          // Best-effort: report the parse failure and drop the record.
          println(error)
          Option.empty[Json]
        case Right(json) =>
          for {
            db <- root.database.string.getOption(json) if "gmall" == db
            recordType <- root.`type`.string.getOption(json)
            if "bootstrap-start" != recordType && "bootstrap-complete" != recordType
          } yield json
      }
    }
  }

  /**
   * Maps each config JSON string to a [[TableProcessDim]] bean; the
   * FlatMapFunction also creates the matching HBase table as a side effect
   * (tables that already exist are left untouched).
   */
  def createHbaseTable(dimConfig: DataStream[String]): DataStream[TableProcessDim] = {
    dimConfig.flatMap(new FlatMapFunction)
  }

  /**
   * Rewrites each record so its "data" object keeps only the columns listed in
   * the table configuration's `sinkColumns` (a comma-separated string).
   *
   * Records whose payload is not a JSON object, or that lack a "data" field,
   * are dropped.
   */
  def extractJsonFields(ds: DataStream[(Json, TableProcessDim)]): DataStream[(JsonObject, TableProcessDim)] = {
    ds.flatMap { case (json, tblDim) =>
      // Trim each column name so whitespace in the config ("id, name") still matches.
      val sinkColumns = tblDim.sinkColumns.split(",").map(_.trim)
      for {
        obj <- json.asObject
        remainObj = obj.remove("data")
        data <- obj("data") // obj is immutable, so "data" is still readable here
        dataObj <- data.asObject
        newDataObj = dataObj.filterKeys(sinkColumns.contains(_)).toJson
      } yield (remainObj.add("data", newDataObj), tblDim)
    }
  }

}
