package com.zshstart.app.func;


import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.debezium.DebeziumDeserializationSchema;
import io.debezium.data.Envelope;
import java.util.Locale;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.util.Collector;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;

/**
 * Custom deserializer that converts Debezium change-event records into JSON strings.
 *
 * @author zshstart
 * @create 2021-09-22 21:29
 */
public class MyStringDebeziumDeserializationSchema implements DebeziumDeserializationSchema<String> {
    /**
     * Converts a Debezium {@link SourceRecord} into a flat JSON string of the form:
     * <pre>
     * {
     *   "database":"XXXX",
     *   "tableName":"XXXXX",
     *   "before":{"id":"XXXX","name":"XXXX"},
     *   "after":{"id":"xxxx","name":"xxxx"},
     *   "op":"XXXX"
     * }
     * </pre>
     * Note: when the operation is "delete" there is no "after" struct, and when it is
     * "create" there is no "before" struct, so both are null-checked (emitted as empty
     * JSON objects when absent).
     *
     * @param sourceRecord the raw Debezium change event
     * @param collector    downstream collector receiving the JSON string
     * @throws Exception if deserialization fails
     */
    @Override
    public void deserialize(SourceRecord sourceRecord, Collector<String> collector) throws Exception {

        // Extract database & table name from the topic.
        // Debezium MySQL topics have the form "<server>.<database>.<table>";
        // "\\." escapes the dot, which is a regex metacharacter.
        String topic = sourceRecord.topic();
        String[] parts = topic.split("\\.");
        if (parts.length < 3) {
            // Fail fast with a diagnostic instead of a bare ArrayIndexOutOfBoundsException.
            throw new IllegalStateException(
                    "Unexpected topic format, expected <server>.<database>.<table> but got: " + topic);
        }
        String database = parts[1];
        String tableName = parts[2];

        // The record value is the Debezium envelope Struct.
        Struct value = (Struct) sourceRecord.value();

        // "before" is absent for create/read events; "after" is absent for delete events.
        JSONObject beforeJson = structToJson(value.getStruct("before"));
        JSONObject afterJson = structToJson(value.getStruct("after"));

        // Operation type (CRUD). Locale.ROOT keeps lowercasing locale-independent
        // (e.g. avoids the Turkish dotless-i problem).
        Envelope.Operation operation = Envelope.operationFor(sourceRecord);
        String type = operation.toString().toLowerCase(Locale.ROOT);
        if ("create".equals(type)) {
            type = "insert";
        }

        // Assemble the result JSON object.
        JSONObject result = new JSONObject();
        result.put("database", database);
        result.put("tableName", tableName);
        result.put("before", beforeJson);
        result.put("after", afterJson);
        result.put("type", type);

        // Emit the serialized result downstream.
        collector.collect(result.toJSONString());
    }

    /**
     * Copies every field of a Debezium row {@link Struct} into a {@link JSONObject}.
     * Returns an empty JSON object when the struct is {@code null} (missing
     * before/after image).
     */
    private static JSONObject structToJson(Struct struct) {
        JSONObject json = new JSONObject();
        if (struct != null) {
            Schema schema = struct.schema();
            for (Field field : schema.fields()) {
                json.put(field.name(), struct.get(field));
            }
        }
        return json;
    }

    @Override
    public TypeInformation<String> getProducedType() {
        // Downstream operators treat the output as a plain String.
        return BasicTypeInfo.STRING_TYPE_INFO;
    }
}
