package com.atguigu.cdc;

import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.DebeziumDeserializationSchema;
import com.alibaba.ververica.cdc.debezium.DebeziumSourceFunction;
import io.debezium.data.Envelope;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;

import java.util.List;

/**
 * Demo Flink job using a custom deserializer: reads MySQL binlog change events
 * via the (Ververica) CDC connector and prints each change as a JSON string.
 */
public class CustomSchema {
    public static void main(String[] args) throws Exception {
        // 1. Set up the streaming environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // 2. Build the MySQL CDC source, wiring in the custom deserializer (MySchema).
        //    StartupOptions.initial() snapshots existing rows before tailing the binlog.
        DebeziumSourceFunction<String> sourceFunction =
                MySQLSource.<String>builder()
                        .hostname("hadoop102")
                        .port(3306)
                        .username("root")
                        .password("000000")
                        .databaseList("gmall2021_realtime")
                        .tableList("gmall2021_realtime.t_user")
                        .startupOptions(StartupOptions.initial())
                        .deserializer(new MySchema())
                        .build();

        // 3. Attach the source, print every emitted JSON record, and run the job.
        DataStreamSource<String> stream = env.addSource(sourceFunction);
        stream.print();
        env.execute();
    }
}
/**
 * Custom Debezium deserialization schema: converts a raw {@link SourceRecord}
 * into a JSON string of the form
 * {@code {"database":...,"table":...,"type":"insert|update|delete|read","data":{...}}}.
 */
class MySchema implements DebeziumDeserializationSchema<String> {
    @Override
    public void deserialize(SourceRecord sourceRecord, Collector<String> collector) throws Exception {
        // The record value is a Debezium Envelope struct: {before, after, source, op, ...}.
        Struct valueStruct = (Struct) sourceRecord.value();
        Struct sourceStruct = valueStruct.getStruct("source");
        // Database and table the change originated from.
        String dbName = sourceStruct.getString("db");
        String tableName = sourceStruct.getString("table");

        // Operation type. BUG FIX: Operation.toString() returns the enum NAME in
        // uppercase ("CREATE", "UPDATE", ...), so the original comparison
        // "create".equals(type) never matched and "CREATE" leaked downstream.
        // Lower-case it first, then rename "create" to "insert" per convention.
        Envelope.Operation operation = Envelope.operationFor(sourceRecord);
        String type = operation.toString().toLowerCase();
        if ("create".equals(type)) {
            type = "insert";
        }

        // Collect every field of the "after" image into a JSON object.
        // "after" is null for delete events, leaving "data" empty.
        JSONObject dataJson = new JSONObject();
        Struct after = valueStruct.getStruct("after");
        if (after != null) {
            List<Field> fieldList = after.schema().fields();
            for (Field field : fieldList) {
                dataJson.put(field.name(), after.get(field));
            }
        }

        // Wrap database, table, operation type and row data into one result JSON.
        JSONObject resJsonObj = new JSONObject();
        resJsonObj.put("database", dbName);
        resJsonObj.put("table", tableName);
        resJsonObj.put("type", type);
        resJsonObj.put("data", dataJson);

        collector.collect(resJsonObj.toString());
    }

    @Override
    public TypeInformation<String> getProducedType() {
        // This deserializer emits plain JSON strings.
        return TypeInformation.of(String.class);
    }
}