package com.ot.flink.cdc.function.deserializer;

import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.json.JsonConverter;
import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.source.SourceRecord;
import com.ververica.cdc.connectors.shaded.org.apache.kafka.connect.storage.ConverterType;
import com.ververica.cdc.debezium.DebeziumDeserializationSchema;

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.util.Collector;

import java.nio.charset.StandardCharsets;
import java.util.HashMap;

/**
 * Deserializes Debezium change events ({@link SourceRecord}) into their JSON
 * string representation using Kafka Connect's {@link JsonConverter}, so that
 * downstream Flink operators receive plain JSON.
 */
public class CustomDebeziumDeserializationSchema implements DebeziumDeserializationSchema<String> {

    // Lazily initialized in deserialize(): JsonConverter is not serializable,
    // so it cannot be created on the client and shipped with this function.
    private transient JsonConverter jsonConverter;

    // Controls the converter's "schemas.enable" flag: when false, the emitted
    // JSON contains only the payload, without the Connect schema envelope.
    private final boolean includeSchema = false;

    @Override
    public TypeInformation<String> getProducedType() {
        return TypeInformation.of(String.class);
    }

    /**
     * Converts one Debezium change event into JSON and emits it downstream.
     * Tombstone records (null value / null serialized form) are skipped.
     *
     * @param sourceRecord change event produced by the Debezium engine
     * @param collector    sink that receives the serialized JSON string
     * @throws Exception if JSON conversion fails
     */
    @Override
    public void deserialize(SourceRecord sourceRecord, Collector<String> collector) throws Exception {
        if (jsonConverter == null) {
            jsonConverter = new JsonConverter();
            HashMap<String, Object> configs = new HashMap<>(2);
            configs.put("converter.type", ConverterType.VALUE.getName());
            configs.put("schemas.enable", includeSchema);
            jsonConverter.configure(configs);
        }

        byte[] bytes = jsonConverter.fromConnectData(
                sourceRecord.topic(), sourceRecord.valueSchema(), sourceRecord.value());
        // fromConnectData returns null for tombstone (null-value) records;
        // guard to avoid an NPE instead of crashing the job.
        if (bytes != null) {
            // Explicit charset: JsonConverter produces UTF-8 bytes, and
            // new String(byte[]) alone would use the platform default.
            collector.collect(new String(bytes, StandardCharsets.UTF_8));
        }
    }
}
