package com.sunzm.flink.datastream.scala.cdc

import java.util

import com.alibaba.fastjson.{JSON, JSONObject}
import com.ververica.cdc.connectors.mysql.source.MySqlSource
import com.ververica.cdc.debezium.{DebeziumSourceFunction, JsonDebeziumDeserializationSchema}
import org.apache.commons.lang3.StringUtils
import org.apache.flink.api.common.eventtime.WatermarkStrategy
import org.apache.flink.streaming.api.scala._
import org.slf4j.{Logger, LoggerFactory}

/**
 *
 * 使用 flink-connector-mysql-cdc 读取 MySqlBinlog
 *
 * @author Administrator
 * @version 1.0
 * @date 2021-08-04 20:37
 */
object MySqlBinlogSourceExample {
  private val logger: Logger = LoggerFactory.getLogger(this.getClass.getName.stripSuffix("$"))
  // Toggle between a local environment (embedded web UI) and the
  // cluster-provided execution environment.
  private val isLocal = true

  /**
   * Entry point: builds a MySQL CDC source, extracts the row image from each
   * Debezium change event, and prints it to stdout.
   */
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = if (isLocal) {
      StreamExecutionEnvironment.createLocalEnvironmentWithWebUI()
    } else {
      StreamExecutionEnvironment.getExecutionEnvironment
    }

    // SECURITY NOTE(review): host, user, and password are hard-coded in source.
    // Move them to program arguments, environment variables, or a secrets store
    // before any non-demo use.
    val mySqlSource: MySqlSource[String] = MySqlSource.builder[String]
      .hostname("82.156.210.70")
      .port(3306)
      .databaseList("test") // monitor all tables under test database
      .username("root")
      .password("ABC123abc.123")
      //.serverTimeZone("UTC")
      .deserializer(new JsonDebeziumDeserializationSchema()) // converts SourceRecord to String
      .build()

    val mysqlDS: DataStream[String] = env.fromSource[String](mySqlSource, WatermarkStrategy.noWatermarks(),
      "mysqlCDC-source")

    // use parallelism 1 for sink to keep message ordering
    //mysqlDS.print().setParallelism(1)
    //mysqlDS.print()

    // BUGFIX: the Debezium JSON envelope produced by
    // JsonDebeziumDeserializationSchema carries the row images under
    // "before"/"after" (plus "op", "source", ...) — it has no "data" key.
    // The previous `getJSONObject("data").toJSONString` therefore dereferenced
    // null and threw a NullPointerException on every record, killing the job.
    // Extract the row image null-safely: "after" for inserts/updates, falling
    // back to "before" for deletes, and to the raw line as a last resort.
    mysqlDS.map(line => {
      val event: JSONObject = JSON.parseObject(line)
      Option(event.getJSONObject("after"))
        .orElse(Option(event.getJSONObject("before")))
        .map(_.toJSONString)
        .getOrElse {
          // Unexpected shape (e.g. schema-change or heartbeat event): keep the
          // raw payload rather than crashing, and leave a trace for debugging.
          logger.warn("Change event without before/after image: {}", line)
          line
        }
    }).print()

    // Trigger the lazily-built job graph; blocks until the (unbounded) job ends.
    env.execute(this.getClass.getSimpleName.stripSuffix("$"))
  }
}
