package com.zhangyang.flink.cdc;


import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;

import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;


/**
 * Flink streaming job that registers a Kafka-backed table via the SQL Kafka
 * connector, selects all rows from it, and prints the resulting changelog
 * stream to stdout. Intended for local testing against a Kafka broker.
 *
 * @author Dell
 * @date 2023/2/1
 */
public class KafkaCdcSourceMain {

    /** Default Kafka bootstrap servers, used when no CLI argument is supplied. */
    private static final String DEFAULT_BOOTSTRAP_SERVERS = "10.215.5.18:9092";
    /** Default Kafka topic to read from. */
    private static final String DEFAULT_TOPIC = "soc-event-topic-test";
    /** Port for the embedded Flink REST UI of the local mini-cluster. */
    private static final String REST_PORT = "8099";

    /**
     * Builds a local Flink streaming environment, declares a Kafka-backed table
     * with the SQL Kafka connector, and prints the changelog stream of
     * {@code select * from person} to stdout.
     *
     * @param args optional overrides: {@code args[0]} = bootstrap servers,
     *             {@code args[1]} = topic; defaults preserve the original
     *             hardcoded values when omitted
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        // Backward-compatible generalization: connection details may be passed
        // on the command line; with no args the behavior is unchanged.
        String bootstrapServers = args.length > 0 ? args[0] : DEFAULT_BOOTSTRAP_SERVERS;
        String topic = args.length > 1 ? args[1] : DEFAULT_TOPIC;

        Configuration configuration = new Configuration();
        configuration.setString(RestOptions.BIND_PORT, REST_PORT);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(configuration);

        EnvironmentSettings envSettings = EnvironmentSettings
                .newInstance()
                .inStreamingMode()
                .build();
        // NOTE: incremental synchronization requires checkpointing, e.g.
        // env.enableCheckpointing(10000); — intentionally left off for local tests.
        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(env, envSettings);

        // DDL: declare a table backed by the Kafka topic. Dropping first makes
        // re-registration within the same session idempotent (previously the
        // DROP statement was built but never executed).
        String kafkaTable = "person";
        tableEnvironment.executeSql("DROP TABLE IF EXISTS " + kafkaTable);
        String sql = "CREATE TABLE " + kafkaTable + " (\n" +
                "    pro String,\n" +
                "    domain String,\n" +
                "    port String\n" +
                ") WITH (\n" +
                "   'connector' = 'kafka',\n" +
                "   'topic' = '" + topic + "',\n" +
                "   'properties.bootstrap.servers' = '" + bootstrapServers + "',\n" +
                "   'format' = 'json',\n" +
                // Tolerate malformed or partial JSON instead of failing the job.
                "   'json.fail-on-missing-field' = 'false',\n" +
                "   'json.ignore-parse-errors' = 'true',\n" +
                "   'scan.startup.mode' = 'latest-offset'\n" +
                ")";
        tableEnvironment.executeSql(sql);

        Table table = tableEnvironment.sqlQuery("select * from person");

        // Print every change row (insert/update/delete) with the original label.
        tableEnvironment.toChangelogStream(table).print("值输出");

        env.execute("kafka");
    }
}
