package com.light.dp.flink.serialization;

import com.alibaba.fastjson.JSONObject;
import com.light.dp.flink.dto.Column;
import com.light.dp.flink.dto.DataChangeInfo;
import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.data.*;
import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.source.SourceRecord;
import com.ververica.cdc.debezium.DebeziumDeserializationSchema;
import io.debezium.data.Envelope;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.util.Collector;

import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Custom Debezium deserialization schema for MySQL CDC sources.
 *
 * <p>Converts each raw Kafka Connect {@link SourceRecord} produced by the
 * Debezium MySQL connector into a {@link DataChangeInfo} carrying:
 * the before/after row images as JSON, the operation type (CREATE / UPDATE /
 * DELETE / READ), binlog position metadata, and the set of changed columns
 * (all columns on insert, changed + primary-key columns on update, primary-key
 * columns only on delete).
 *
 * @author zhangzhongzhen wrote on 2024/3/24
 * @version 1.0
 */
public class MysqlDeserialization implements DebeziumDeserializationSchema<DataChangeInfo> {

    public static final String TS_MS = "ts_ms";
    public static final String BIN_FILE = "file";
    public static final String POS = "pos";
    public static final String CREATE = "CREATE";
    public static final String BEFORE = "before";
    public static final String AFTER = "after";
    public static final String SOURCE = "source";
    public static final String UPDATE = "UPDATE";

    /**
     * Per-topic cache of primary-key column names, refreshed on every record
     * that carries a non-null key. Shared across operator instances in the
     * same JVM, hence the concurrent map.
     */
    private static final Map<String, List<String>> primaryKeyCache = new ConcurrentHashMap<>();

    /**
     * Deserializes a raw CDC record into a {@link DataChangeInfo} change event.
     *
     * @param sourceRecord the Kafka Connect record emitted by Debezium; its topic
     *                     is expected to follow {@code <server>.<database>.<table>}
     * @param collector    downstream collector receiving the converted event
     */
    @Override
    public void deserialize(SourceRecord sourceRecord, Collector<DataChangeInfo> collector) {
        // Topic format: <server-name>.<database>.<table>; guard against
        // unexpected topics (e.g. schema-change topics with fewer segments).
        String topic = sourceRecord.topic();
        String[] fields = topic.split("\\.");
        String database = fields.length > 1 ? fields[1] : "";
        String tableName = fields.length > 2 ? fields[2] : "";

        Struct struct = (Struct) sourceRecord.value();
        final Struct source = struct.getStruct(SOURCE);

        DataChangeInfo dataChangeInfo = new DataChangeInfo();
        dataChangeInfo.setBeforeData(getJsonObject(struct, BEFORE).toJSONString());
        dataChangeInfo.setAfterData(getJsonObject(struct, AFTER).toJSONString());

        // Operation type: CREATE / UPDATE / DELETE (READ for snapshot rows).
        Envelope.Operation operation = Envelope.operationFor(sourceRecord);
        dataChangeInfo.setEventType(operation.name());
        dataChangeInfo.setFileName(Optional.ofNullable(source.get(BIN_FILE)).map(Object::toString).orElse(""));
        dataChangeInfo.setFilePos(Optional.ofNullable(source.get(POS)).map(x -> Integer.parseInt(x.toString())).orElse(0));
        dataChangeInfo.setDatabase(database);
        dataChangeInfo.setTableName(tableName);
        // Default to 0L rather than throwing if ts_ms is ever absent.
        dataChangeInfo.setChangeTime(Optional.ofNullable(source.get(TS_MS)).map(x -> Long.parseLong(x.toString())).orElse(0L));

        // Primary-key names; the record key is null for keyless tables.
        List<String> primaryKeys = extractPrimaryKeys(topic, (Struct) sourceRecord.key());

        List<Column> columns = extractColumns(struct, operation, primaryKeys);
        dataChangeInfo.setColumns(columns.toArray(new Column[0]));

        collector.collect(dataChangeInfo);
    }

    /**
     * Reads the primary-key column names from the record key and caches them
     * per topic. Falls back to the cached value (or an empty list) when the
     * key is null, e.g. for tables without a primary key.
     */
    private List<String> extractPrimaryKeys(String topic, Struct key) {
        if (key == null) {
            return primaryKeyCache.getOrDefault(topic, Collections.emptyList());
        }
        List<String> primary = new ArrayList<>();
        for (Field field : key.schema().fields()) {
            primary.add(field.name());
        }
        primaryKeyCache.put(topic, primary);
        return primary;
    }

    /**
     * Builds the column list for the event:
     * <ul>
     *   <li>CREATE — every column of the new row;</li>
     *   <li>UPDATE — columns whose value changed, plus all primary-key columns;</li>
     *   <li>DELETE — primary-key columns of the deleted row;</li>
     *   <li>other operations (e.g. snapshot READ) — empty list, as before.</li>
     * </ul>
     */
    private List<Column> extractColumns(Struct struct, Envelope.Operation operation, List<String> primaryKeys) {
        List<Column> columns = new ArrayList<>();
        if (operation == Envelope.Operation.CREATE) {
            Schema schema = struct.schema().field(AFTER).schema();
            Struct afterData = struct.getStruct(AFTER);
            for (Field field : schema.fields()) {
                columns.add(buildColumn(field.name(), afterData.get(field), field.schema(),
                        primaryKeys.contains(field.name())));
            }
        } else if (operation == Envelope.Operation.UPDATE) {
            Struct beforeData = struct.getStruct(BEFORE);
            Struct afterData = struct.getStruct(AFTER);
            Schema afterSchema = struct.schema().field(AFTER).schema();
            for (Field field : afterSchema.fields()) {
                Object beforeValue = beforeData.get(field);
                Object afterValue = afterData.get(field);
                boolean isPrimary = primaryKeys.contains(field.name());
                // Record the column when its value changed; always keep the
                // primary key so downstream consumers can locate the row.
                if (isPrimary || !Objects.equals(beforeValue, afterValue)) {
                    columns.add(buildColumn(field.name(), afterValue, field.schema(), isPrimary));
                }
            }
        } else if (operation == Envelope.Operation.DELETE) {
            Schema schema = struct.schema().field(BEFORE).schema();
            Struct beforeData = struct.getStruct(BEFORE);
            for (String pk : primaryKeys) {
                columns.add(buildColumn(pk, beforeData.get(pk), schema.field(pk).schema(), true));
            }
        }
        return columns;
    }

    /** Assembles a single {@link Column}; null values are kept as null. */
    private Column buildColumn(String name, Object value, Schema schema, boolean primary) {
        Column column = new Column();
        column.setName(name);
        column.setValue(value == null ? null : value.toString());
        column.setType(schema.type().getName());
        column.setPrimary(primary);
        return column;
    }

    /**
     * Extracts the row image ({@code before} or {@code after}) from the
     * envelope as a flat JSON object of column name to value. Returns an
     * empty object when that side of the envelope is absent (e.g. no
     * {@code before} on insert, no {@code after} on delete).
     */
    private JSONObject getJsonObject(Struct value, String fieldElement) {
        Struct element = value.getStruct(fieldElement);
        JSONObject jsonObject = new JSONObject();
        if (element != null) {
            for (Field field : element.schema().fields()) {
                jsonObject.put(field.name(), element.get(field));
            }
        }
        return jsonObject;
    }

    /** Produced-type hint so Flink can serialize {@link DataChangeInfo} downstream. */
    @Override
    public TypeInformation<DataChangeInfo> getProducedType() {
        return TypeInformation.of(DataChangeInfo.class);
    }
}
