package com.cch.bigdata.flink.ods

import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import com.cch.bigdata.flink.model.AreaEvent
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer

import java.util
import java.util.Properties

import scala.util.control.NonFatal

// Sample job: syncs change events for the lagou_area table (consumed as JSON from Kafka, written to HBase).
object SyncDataDemo {

  // The single database/table this demo is meant to synchronize.
  // FIX: these were previously declared but never used — events from every
  // table were processed. They are now applied as a filter in main().
  val databaseName: String = "flink_demo"
  val tableName: String = "lagou_area"

  // Change-event operation types that are propagated to the sink.
  val opTypeList = new util.ArrayList[String](
    util.Arrays.asList("INSERT", "UPDATE", "DELETE"))

  /**
   * Entry point: reads change-event JSON from Kafka, parses each message into
   * AreaEvent rows for the configured database/table, and writes them to HBase.
   */
  def main(args: Array[String]): Unit = {

    // Obtain the streaming execution environment.
    val environment: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // For local debugging the Kafka source can be replaced with a text file:
    // val kafkaDataStream: DataStream[String] = environment.readTextFile("data/delete.txt")

    // Build the Kafka consumer and start from the latest offset.
    val flinkKafkaConsumer: FlinkKafkaConsumer[String] = getKafkaSource()
    flinkKafkaConsumer.setStartFromLatest()

    val kafkaDataStream: DataStream[String] = environment.addSource(flinkKafkaConsumer)

    // Parse each raw message into the list of AreaEvent rows it carries.
    // An empty list is emitted for messages that are filtered out or malformed.
    val mappedDataStream: DataStream[util.ArrayList[AreaEvent]] = kafkaDataStream.map(message => {

      // Result rows extracted from this one message.
      val events = new util.ArrayList[AreaEvent]()

      try {
        // Convert the raw event payload into a JSON object.
        val eventData: JSONObject = JSON.parseObject(message)

        // Operation type of this event; anything outside opTypeList is ignored.
        val opType: String = eventData.getString("type")

        if (opTypeList.contains(opType)) {

          val database: String = eventData.getString("database")
          val table: String = eventData.getString("table")

          // FIX: only sync the configured table — previously every table's
          // events were forwarded even though databaseName/tableName existed.
          if (databaseName.equals(database) && tableName.equals(table)) {

            // Row payloads of the event. An UPDATE/DELETE may carry many rows,
            // e.g. "update table1 set status=1 where status=2".
            val data: JSONArray = eventData.getJSONArray("data")

            // FIX: guard against a missing "data" array (e.g. DDL events)
            // which would otherwise cause a NullPointerException.
            if (data != null) {
              data.forEach(row => {
                events.add(AreaEvent(database, table, opType, row.toString))
              })
            }
          }
        }
      } catch {
        case NonFatal(e) =>
          // FIX: one malformed message must not crash the whole streaming job;
          // log and skip it, emitting an empty batch.
          println(s"SyncDataDemo: skipping malformed event: ${e.getMessage}")
      }
      events
    })

    mappedDataStream.addSink(new HbaseSink)

    environment.execute()
  }

  /**
   * Builds a FlinkKafkaConsumer for the change-event topic.
   * Deserialization of record values is done by SimpleStringSchema; the
   * key/value deserializer properties below are ignored by Flink's consumer
   * but kept for documentation of the record format.
   */
  def getKafkaSource(): FlinkKafkaConsumer[String] = {
    val props = new Properties()
    props.setProperty("bootstrap.servers", "localhost:9092")
    props.setProperty("group.id", "consumer-group-001")
    props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    props.setProperty("auto.offset.reset", "latest")

    new FlinkKafkaConsumer[String]("topic_lagou_area", new SimpleStringSchema(), props)
  }


}
