package ods

import com.alibaba.fastjson.{JSON, JSONObject}
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.api.scala._
import java.util

/**
 * @author: yehw
 * @date: 2021/3/10 10:06 PM
 * @description: Streams change records from a Kafka topic into HBase.
 */
object KafkaToHbase {

  /**
   * Entry point. Consumes JSON change records (CDC-style: `database`,
   * `table`, `type`, `data[]`) from the "test" Kafka topic, converts each
   * row of the `data` array into a [[TableObject]], and writes the results
   * to HBase through the custom [[SinkHBase]] sink.
   */
  def main(args: Array[String]): Unit = {
    // Obtain the streaming execution environment and attach the Kafka source.
    val environment = StreamExecutionEnvironment.getExecutionEnvironment
    val kafkaConsumer = new SourceKafka().getKafkaSource("test")
    // Start from the latest offsets: only new messages are processed.
    kafkaConsumer.setStartFromLatest()
    val sourceStream = environment.addSource(kafkaConsumer)
    // Debug sink: echoes raw messages to task stdout.
    sourceStream.print()

    // Parse each message and fan its "data" array out into TableObjects.
    val mapped: DataStream[util.ArrayList[TableObject]] = sourceStream.map(message => {
      val jsonObj: JSONObject = JSON.parseObject(message)
      // getString is null-safe for absent keys, unlike get(...).toString,
      // which throws an NPE when the key is missing.
      val database = jsonObj.getString("database")
      val table = jsonObj.getString("table")
      val typeInfo = jsonObj.getString("type")
      val objects = new util.ArrayList[TableObject]()
      // "data" can be absent (e.g. DDL / heartbeat events) — guard before
      // iterating so a single malformed message doesn't kill the operator.
      val dataArray = jsonObj.getJSONArray("data")
      if (dataArray != null) {
        dataArray.forEach(row =>
          objects.add(TableObject(database, table, typeInfo, row.toString))
        )
      }
      objects
    })

    // Sink each batch of TableObjects to HBase via the custom sink.
    mapped.addSink(new SinkHBase)
    environment.execute()
  }

}
