package com.gxlevi.db

import com.gxlevi.db.bean.{Canal, HBaseOperation}
import com.gxlevi.db.task.PreprocessTask
import com.gxlevi.db.util.{FlinkUtil, HBaseUtil}
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks
import org.apache.flink.streaming.api.functions.sink.SinkFunction
import org.apache.flink.streaming.api.scala.DataStream
import org.apache.flink.streaming.api.watermark.Watermark

/**
 * Entry point for the canal-to-HBase streaming job.
 *
 * Pipeline: Kafka (canal binlog JSON) -> parse to [[Canal]] -> assign
 * event-time watermarks -> [[PreprocessTask]] flattens rows into
 * [[HBaseOperation]]s -> sink each operation to HBase.
 */
object App {
  def main(args: Array[String]): Unit = {
    val env = FlinkUtil.initFlinkEnv()

    // Source: canal binlog messages arrive as JSON strings on Kafka.
    val consumer = FlinkUtil.initKafkaFlink()
    val kafkaDataStream: DataStream[String] = env.addSource(consumer)
    val canalDataStream = kafkaDataStream.map {
      json =>
        Canal(json)
    }

    // Bounded-out-of-orderness watermarking: track the max event timestamp
    // seen so far and emit it minus a fixed delay as the watermark.
    val waterDataStream = canalDataStream.assignTimestampsAndWatermarks(new AssignerWithPeriodicWatermarks[Canal] {
      // Highest event timestamp observed so far; watermark trails it by delayTime.
      var currentTimestamp = 0L
      // Maximum tolerated out-of-orderness (ms); never reassigned, so a val.
      val delayTime = 2000L

      override def getCurrentWatermark: Watermark = {
        new Watermark(currentTimestamp - delayTime)
      }

      override def extractTimestamp(t: Canal, l: Long): Long = {
        // BUGFIX: compare against the running max (currentTimestamp), not the
        // framework-supplied previous timestamp `l` (which may be Long.MinValue
        // or an unrelated Kafka record timestamp). This keeps the watermark
        // monotonically non-decreasing over observed event timestamps.
        currentTimestamp = Math.max(t.timestamp, currentTimestamp)
        currentTimestamp
      }
    })

    // Renamed HBaseDS -> hbaseDS: vals use lowerCamelCase.
    val hbaseDS: DataStream[HBaseOperation] = PreprocessTask.process(waterDataStream)
    hbaseDS.print()
    // NOTE(review): this sink issues one synchronous HBase call per record;
    // consider batching/buffering if throughput becomes a concern.
    hbaseDS.addSink(new SinkFunction[HBaseOperation] {
      override def invoke(value: HBaseOperation): Unit = {
        value.opType match {
          case "DELETE" => HBaseUtil.deleteData(value.tableName, value.rowKey, value.cfName)
          // Everything else (INSERT/UPDATE) is an upsert in HBase terms.
          case _ => HBaseUtil.putData(value.tableName, value.rowKey, value.cfName, value.colName, value.colValue)
        }
      }
    })
    env.execute("canal-hbase")
  }
}
