package com.codejiwei.flink.cdc;

import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * author: codejiwei
 * date: 2023/8/27
 * desc: flink cdc demo
 **/
public class MySQLCdcDemo {

    /**
     * Reads change events from a MySQL table via Flink CDC, prints the raw
     * Debezium-style JSON records, and splits the stream into DDL and DML
     * sub-streams.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the printed output ordered for the demo.
        env.setParallelism(1);

        // NOTE(review): connection details and credentials are hard-coded for
        // local testing; move them to configuration/secrets before reuse.
        MySqlSource<String> mysqlCdcSource = MySqlSource.<String>builder()
                .hostname("localhost")
                .port(3306)
                .databaseList("mydb")
                .tableList("mydb.test_add_table4")
                .username("root")
                .password("123456")
                // Unique binlog-client server id; must not clash with other
                // replicas/connectors reading the same MySQL instance.
                .serverId("11113")
                // earliest(): replay the binlog from its beginning.
                // Use StartupOptions.initial() for snapshot + incremental reads.
                .startupOptions(StartupOptions.earliest())
                // Emit each change event as a Debezium-style JSON string.
                .deserializer(new JsonDebeziumDeserializationSchema())
                .build();

        DataStreamSource<String> cdcSource =
                env.fromSource(mysqlCdcSource, WatermarkStrategy.noWatermarks(), "cdcSource");

        cdcSource.print();

        // DDL stream: schema-change events carry a "historyRecord" payload.
        // Fix: the filter was previously commented out, so the "DDL" stream was
        // the unfiltered source and .uid() was being applied to the source
        // operator itself instead of a dedicated filter operator.
        SingleOutputStreamOperator<String> ddlSQLStream = cdcSource
                .filter(line -> line.contains("historyRecord") && !line.contains("CHANGE COLUMN"))
                .uid("ddlSQLStream");

        // DML stream: every event that is not a schema change.
        SingleOutputStreamOperator<String> dmlStream = cdcSource
                .filter(line -> !line.contains("historyRecord") && !line.contains("CHANGE COLUMN"))
                .uid("dmlStream");

        // Kafka sink for downstream delivery. Built but not yet attached —
        // TODO(review): map dmlStream/ddlSQLStream into KafkaRecord and call
        // .sinkTo(kafkaSink) once the record mapping is implemented.
        KafkaSink<KafkaRecord> kafkaSink = KafkaSink.<KafkaRecord>builder()
                .setBootstrapServers("192.168.102.154:29092")
                .setRecordSerializer(new CustomKafkaSerializationSchema())
                .build();

        env.execute();

    }
}
