package com.atguigu.gmall.cdc;

import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.DebeziumDeserializationSchema;
import com.alibaba.ververica.cdc.debezium.DebeziumSourceFunction;
import io.debezium.data.Envelope;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;

import java.util.List;

/**
 * Author: Felix
 * Date: 2022/4/2
 * Desc: Reads rows from a MySQL table with FlinkCDC using the DataStream API
 *       and a custom Debezium deserialization schema.
 */
public class FlinkCDC03_Custom {
    public static void main(String[] args) throws Exception {
        //TODO 1. Prepare the basic environment
        //1.1 Obtain the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //1.2 Use a single parallel task so printed output is easy to follow
        env.setParallelism(1);

        // Build the MySQL CDC source. StartupOptions.initial() first takes a full
        // snapshot of the table, then switches over to tailing the binlog.
        // NOTE(review): credentials are hard-coded — move them to external config
        // for anything beyond a local demo.
        DebeziumSourceFunction<String> mysqlSource = MySQLSource.<String>builder()
            .hostname("hadoop202")
            .port(3306)
            .databaseList("gmall0925_realtime")
            .tableList("gmall0925_realtime.t_user")
            .username("root")
            .password("123456")
            .startupOptions(StartupOptions.initial())
            .deserializer(new MyDebeziumDeserializationSchema())
            .build();

        DataStreamSource<String> cdcStream = env.addSource(mysqlSource);
        cdcStream.print(">>>>");
        env.execute();
    }
}

/**
 * Custom Debezium deserialization schema that flattens a raw {@link SourceRecord}
 * into a JSON string of the form:
 * {@code {"database":"...","table":"...","type":"insert|update|delete|read","data":{...}}}
 */
class MyDebeziumDeserializationSchema implements DebeziumDeserializationSchema<String> {

    /**
     * Converts one change event into a JSON string and emits it downstream.
     *
     * @param sourceRecord raw change event produced by the Debezium engine
     * @param collector    receives the flattened JSON string
     */
    @Override
    public void deserialize(SourceRecord sourceRecord, Collector<String> collector) throws Exception {
        Struct valueStruct = (Struct) sourceRecord.value();
        // FIX: guard against tombstone records (value == null), which Kafka Connect
        // sources can emit after a delete; the previous code would NPE on them.
        if (valueStruct == null) {
            return;
        }
        Struct sourceStruct = valueStruct.getStruct("source");

        // Operation type; Debezium reports inserts as CREATE, normalize to "insert".
        String type = Envelope.operationFor(sourceRecord).toString().toLowerCase();
        if ("create".equals(type)) {
            type = "insert";
        }

        // Database and table the change originated from.
        String database = sourceStruct.getString("db");
        String table = sourceStruct.getString("table");

        // JSON envelope for the whole event.
        JSONObject resJsonObj = new JSONObject();
        resJsonObj.put("database", database);
        resJsonObj.put("table", table);
        resJsonObj.put("type", type);

        // Row image after the change. "after" is null for delete events,
        // in which case "data" is emitted as an empty object.
        JSONObject dataJsonObj = new JSONObject();
        Struct afterStruct = valueStruct.getStruct("after");
        if (afterStruct != null) {
            for (Field field : afterStruct.schema().fields()) {
                dataJsonObj.put(field.name(), afterStruct.get(field));
            }
        }
        resJsonObj.put("data", dataJsonObj);

        collector.collect(resJsonObj.toJSONString());
    }

    /** Produced element type: plain JSON strings. */
    @Override
    public TypeInformation<String> getProducedType() {
        return TypeInformation.of(String.class);
    }
}
