package com.haoziqi;

import com.mysql.cj.protocol.ResultsetRowsOwner;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

/**
 * Streams change-data-capture (CDC) events from a MySQL table using the
 * Flink SQL {@code mysql-cdc} connector and prints them as a retract stream.
 *
 * <p>The job declares a {@code user_info} source table via DDL, selects all
 * rows/changes from it, and converts the result to a retract stream where
 * retracted (old) rows are flagged {@code false} and added (new) rows are
 * flagged {@code true}.
 */
public class FlinkSQLCDC {

    /**
     * Entry point: builds the table environment, registers the CDC source,
     * and submits the streaming job.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to submit or execute; the
     *         exception is propagated so the process exits non-zero instead
     *         of silently printing a stack trace
     */
    public static void main(String[] args) throws Exception {
        // 1. Create the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // EnvironmentSettings explicitly selects the Blink planner in
        // streaming mode; without it, older Flink versions may default to
        // the legacy planner, which does not support this CDC workflow.
        EnvironmentSettings settings = EnvironmentSettings
                .newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // 2. Register the MySQL CDC source table via Flink SQL DDL. The
        //    mysql-cdc connector reads the binlog of
        //    gmall_flink_0923.z_user_info and emits row changes.
        // NOTE(review): credentials are hard-coded for demo purposes only —
        // in production, load them from configuration or a secret store.
        tableEnv.executeSql("CREATE TABLE user_info ( " +
                " id INT NOT NULL, " +
                " name STRING, " +
                " phone_num STRING " +
                ") WITH ( " +
                " 'connector' = 'mysql-cdc', " +
                " 'hostname' = 'hadoop102', " +
                " 'port' = '3306', " +
                " 'username' = 'root', " +
                " 'password' = '123456', " +
                " 'database-name' = 'gmall_flink_0923', " +
                " 'table-name' = 'z_user_info' " +
                ")");
        Table table = tableEnv.sqlQuery("select * from user_info");
        // Convert the Table to a retract stream: each element is a
        // (Boolean, Row) pair — false marks a retracted (previous) row,
        // true marks an added (new) row.
        tableEnv.toRetractStream(table, Row.class).print();

        // 3. Submit the job. Failures propagate to the caller rather than
        //    being swallowed by a printStackTrace(), so job-submission
        //    errors are not silently ignored.
        env.execute("FlinkSQLCDC");
    }
}
