package com.yanggu.bigdata.realtime.app.ods.cdc

import com.ververica.cdc.connectors.mysql.source.MySqlSource
import com.ververica.cdc.connectors.mysql.table.StartupOptions
import com.yanggu.bigdata.realtime.common.GmallConfig.KAFKA_BROKER_LIST
import com.yanggu.bigdata.realtime.utils.KafkaUtil
import org.apache.flink.api.common.eventtime.WatermarkStrategy
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.streaming.api.scala._

object Flink_CDCWithConsumerDeserialization {

  /**
   * Flink job that captures MySQL change events via Flink CDC (with a custom
   * deserializer) and forwards them to a Kafka topic.
   *
   * Connection settings are read from `/jdbc.properties` on the classpath.
   * The monitored database/tables and the target Kafka topic can be
   * overridden via the same properties file (`cdc.database`, `cdc.tables`,
   * `kafka.topic`); the defaults reproduce the original hard-coded values.
   */
  def main(args: Array[String]): Unit = {

    // 1. Create the streaming execution environment.
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // 1.1 Parallelism 1 keeps the CDC change-log order globally consistent.
    env.setParallelism(1)

    // Load settings via the InputStream overload rather than
    // getResource(...).getPath: the latter NPEs when the resource is missing
    // and yields an unusable path when the job runs from inside a fat jar.
    val tool = ParameterTool.fromPropertiesFile(getClass.getResourceAsStream("/jdbc.properties"))

    // Overridable via properties; defaults preserve the original behavior.
    val database = tool.get("cdc.database", "gmall_flink")
    // To monitor every table under the database, use the "<db>.*" pattern.
    val tables = tool.get("cdc.tables", s"$database.*")
    val topic = tool.get("kafka.topic", "ods_base_db")

    // Configure the MySQL CDC source.
    val mysqlSource = MySqlSource.builder[String]()
      .hostname(tool.get("jdbc.host"))
      .port(tool.getInt("jdbc.port"))
      .username(tool.get("jdbc.username"))
      .password(tool.get("jdbc.password"))
      .databaseList(database)
      .tableList(tables)
      // "initial" takes a full snapshot first, then tails the binlog.
      .startupOptions(StartupOptions.initial())
      // Custom deserializer turning change records into String payloads.
      .deserializer(new ConsumerDeserialization)
      .build()

    val mysqlDataStream = env
      // Attach the MySQL CDC source; CDC events carry no event time here.
      .fromSource(mysqlSource, WatermarkStrategy.noWatermarks[String](), "MySQL Source")

    // Print to stdout for debugging/inspection.
    mysqlDataStream.print()

    // Sink the change log to Kafka.
    mysqlDataStream.addSink(KafkaUtil.getKafkaSink(KAFKA_BROKER_LIST, topic))

    env.execute("Flink_CDCWithConsumerDeserialization Job")
  }

}
