package com.shujia.flink.sql

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.EnvironmentSettings
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.types.Row

object Demo9MysqlCDC {

  /**
    * Demo job: capture changes from a MySQL table with the mysql-cdc connector
    * and forward them to Kafka encoded as changelog-json.
    *
    * The mysql-cdc connector works in two phases:
    *   1. a full snapshot of the existing rows in the table,
    *   2. then continuous, real-time capture from the MySQL binlog.
    */
  def main(args: Array[String]): Unit = {
    // Bootstrap the streaming runtime and a Blink-planner table environment on top of it.
    val streamEnv: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    val blinkSettings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .useBlinkPlanner()
      .inStreamingMode()
      .build()

    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(streamEnv, blinkSettings)

    // CDC source: full snapshot of mysql `student.student`, then binlog tailing.
    val mysqlCdcSourceDdl: String =
      """
        |
        |CREATE TABLE student (
        |  id int,
        |  name string,
        |  age int ,
        |  gender string,
        |  clazz string
        |) WITH (
        | 'connector' = 'mysql-cdc',
        | 'hostname' = 'node1',
        | 'port' = '3306',
        | 'username' = 'root',
        | 'password' = '123456',
        | 'database-name' = 'student',
        | 'table-name' = 'student'
        |)
        |
      """.stripMargin

    // Plain Kafka source table (csv format). Registered here but not referenced
    // by the queries below — kept for demo purposes.
    val kafkaSourceDdl: String =
      """
        |
        |CREATE TABLE kafka_source (
        |  id int,
        |  name string,
        |  age int ,
        |  gender string,
        |  clazz string
        |) WITH (
        | 'connector' = 'kafka',
        | 'topic' = 'student',
        | 'properties.bootstrap.servers' = 'master:9092,node1:9092,node2:9092',
        | 'format' = 'csv'
        |)
        |
      """.stripMargin

    // Sink table: an updating stream cannot be written to Kafka with plain csv/json;
    // the changelog-json format is used so retractions/updates survive the round trip.
    val kafkaSinkDdl: String =
      """
        |
        |CREATE TABLE kafka_sink (
        |  id int,
        |  name string,
        |  age int ,
        |  gender string,
        |  clazz string
        |) WITH (
        | 'connector' = 'kafka',
        | 'topic' = 'student1',
        | 'properties.bootstrap.servers' = 'master:9092,node1:9092,node2:9092',
        | 'format' = 'changelog-json'
        |)
        |
      """.stripMargin

    // INSERT INTO ... submits the job; no explicit env.execute() is required.
    val pipeline: String =
      """
        |insert into kafka_sink
        |select * from student
        |
      """.stripMargin

    // Register the tables, then launch the CDC-to-Kafka pipeline.
    tableEnv.executeSql(mysqlCdcSourceDdl)
    tableEnv.executeSql(kafkaSourceDdl)
    tableEnv.executeSql(kafkaSinkDdl)
    tableEnv.executeSql(pipeline)
  }
}
