package org.lk.common.reader;

import cn.hutool.json.JSONUtil;
import com.ververica.cdc.debezium.DebeziumDeserializationSchema;
import io.debezium.data.Envelope;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.util.Collector;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;

import java.util.HashMap;
import java.util.Map;

/**
 * Custom Debezium deserialization schema.
 * <p>
 * Flattens a CDC {@link SourceRecord} into a JSON string containing the
 * database/table name, the binlog offset (file / pos / ts_sec), the row data
 * (plus the pre-update image for updates) and a simplified operation type
 * ("insert" / "update" / "delete" / "select").
 * lk
 */
public class CustomDebeziumDeserializationSchema implements DebeziumDeserializationSchema<String> {
    private static final long serialVersionUID = 1L;


    public CustomDebeziumDeserializationSchema() {
    }


    /**
     * Converts one Debezium change record into a JSON string and emits it.
     *
     * @param record the raw change record produced by the Debezium engine
     * @param out    collector receiving the flattened JSON representation
     * @throws Exception propagated from record access or JSON serialization
     */
    @Override
    public void deserialize(SourceRecord record, Collector<String> out) throws Exception {
        Map<String, Object> result = new HashMap<>();

        // Binlog offset information for this record.
        Map<String, ?> sourceOffset = record.sourceOffset();
        result.put("pos", sourceOffset.get("pos"));
        result.put("tsSec", sourceOffset.get("ts_sec"));
        result.put("file", sourceOffset.get("file"));

        // Topic format is "<serverName>.<database>.<table>". Guard against
        // topics with fewer segments (e.g. schema-change topics) instead of
        // throwing ArrayIndexOutOfBoundsException.
        String[] split = record.topic().split("[.]");
        result.put("database", split.length > 1 ? split[1] : null);
        result.put("table", split.length > 2 ? split[2] : null);

        Struct struct = (Struct) record.value();
        Struct after = struct.getStruct("after");
        Struct before = struct.getStruct("before");

        /*
         * 1. both before and after present -> update
         * 2. only before present           -> delete
         * 3. only after present            -> insert
         */
        if (before != null && after != null) {
            // update: keep both the pre-update image and the new row
            result.put("updateBeforeData", structToMap(before));
            result.put("data", structToMap(after));
        } else if (before != null) {
            // delete: only the removed row is available
            result.put("data", structToMap(before));
        } else if (after != null) {
            // insert: only the new row is available
            result.put("data", structToMap(after));
        }

        result.put("type", operationType(Envelope.operationFor(record)));
        out.collect(JSONUtil.toJsonStr(result));
    }

    /**
     * Copies every field of the given struct into a plain map keyed by field name.
     *
     * @param struct the Kafka Connect struct to flatten (must not be null)
     * @return a mutable map of field name to field value
     */
    private static Map<String, Object> structToMap(Struct struct) {
        Schema schema = struct.schema();
        Map<String, Object> map = new HashMap<>(schema.fields().size());
        for (Field field : schema.fields()) {
            map.put(field.name(), struct.get(field));
        }
        return map;
    }

    /**
     * Maps a Debezium operation to the simplified type string used downstream.
     * Switching on the enum directly avoids the locale-sensitive
     * toString().toLowerCase() round-trip of the original implementation.
     *
     * @param operation the operation extracted from the record's envelope; may be null
     * @return "insert", "update", "delete" or "select"
     */
    private static String operationType(Envelope.Operation operation) {
        if (operation == null) {
            // Records without an operation header fall back to "select".
            return "select";
        }
        switch (operation) {
            case CREATE:
                return "insert";
            case UPDATE:
                return "update";
            case DELETE:
                return "delete";
            default:
                // READ (snapshot phase) and any future values map to "select".
                return "select";
        }
    }

    @Override
    public TypeInformation<String> getProducedType() {
        return BasicTypeInfo.STRING_TYPE_INFO;
    }
}
