package com.patsnap.data.npd.dw.etl.serialization;

import com.patsnap.one.etl.cdc.CdcRecord;
import com.patsnap.one.etl.cdc.cleaner.CdcRecordCleaner;
import com.patsnap.one.etl.cdc.ticdc.canal.json.decoder.TicdcCanalJsonRecordDecoder;
import com.patsnap.one.etl.table.meta.definition.SourceTableDefinition;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.ConsumerRecord;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static com.patsnap.one.etl.constant.Constants.KAFKA_SOURCE_INFO;

@Slf4j
public class TicdcCanalJsonKafkaRecordDeserializationSchema implements KafkaRecordDeserializationSchema<CdcRecord> {

    // KafkaRecordDeserializationSchema extends Serializable; pin the serialized form explicitly.
    private static final long serialVersionUID = 1L;

    // Decodes a TiCDC canal-json payload string into zero or more CdcRecords.
    // Fully configured at construction; must itself be serializable for Flink task distribution.
    private final TicdcCanalJsonRecordDecoder ticdcCanalJsonRecordDecoder;

    /** Creates a schema with no table filter, no cleaners, and no per-table configuration. */
    public TicdcCanalJsonKafkaRecordDeserializationSchema() {
        this(null, null);
    }

    /**
     * Creates a schema that applies the given cleaners to each decoded record.
     *
     * @param cdcRecordCleaners cleaners applied by the decoder; may be {@code null}
     */
    public TicdcCanalJsonKafkaRecordDeserializationSchema(List<CdcRecordCleaner> cdcRecordCleaners) {
        this(cdcRecordCleaners, null);
    }

    /**
     * Creates a schema restricted to the given source tables.
     *
     * @param sourceTableNames source table names to decode; may be {@code null}
     */
    public TicdcCanalJsonKafkaRecordDeserializationSchema(Set<String> sourceTableNames) {
        this(sourceTableNames, null, null, null);
    }

    /**
     * Creates a schema with cleaners and per-table ignore-compare fields.
     *
     * @param cdcRecordCleaners                cleaners applied by the decoder; may be {@code null}
     * @param tableNameRefIgnoreCompareFields  table name -> fields excluded from comparison; may be {@code null}
     */
    public TicdcCanalJsonKafkaRecordDeserializationSchema(List<CdcRecordCleaner> cdcRecordCleaners, Map<String, Set<String>> tableNameRefIgnoreCompareFields) {
        this(null, cdcRecordCleaners, tableNameRefIgnoreCompareFields, null);
    }

    /**
     * Full-argument constructor; all other constructors delegate here. Any argument may be
     * {@code null}, in which case the decoder falls back to its own defaults.
     *
     * @param sourceTableNames                     source table names to decode; may be {@code null}
     * @param cdcRecordCleaners                    cleaners applied by the decoder; may be {@code null}
     * @param tableNameRefIgnoreCompareFields      table name -> fields excluded from comparison; may be {@code null}
     * @param tableNameRefSourceTableDefinitionMap table name -> source table definition; may be {@code null}
     */
    public TicdcCanalJsonKafkaRecordDeserializationSchema(Set<String> sourceTableNames, List<CdcRecordCleaner> cdcRecordCleaners, Map<String, Set<String>> tableNameRefIgnoreCompareFields, Map<String, SourceTableDefinition> tableNameRefSourceTableDefinitionMap) {
        ticdcCanalJsonRecordDecoder = new TicdcCanalJsonRecordDecoder(sourceTableNames, tableNameRefIgnoreCompareFields, tableNameRefSourceTableDefinitionMap, cdcRecordCleaners);
    }

    /**
     * Tags each decoded record with its Kafka provenance (topic/partition/offset) and forwards it
     * downstream. A {@code null} or empty result list is a no-op.
     *
     * @param kafkaSourceInfo topic/partition/offset of the originating Kafka message
     * @param results         records decoded from one Kafka message; may be {@code null} or empty
     * @param collector       Flink output collector
     */
    public void collect(CdcRecord.KafkaSourceInfo kafkaSourceInfo, List<CdcRecord> results, Collector<CdcRecord> collector) {
        if (CollectionUtils.isNotEmpty(results)) {
            for (CdcRecord cdcRecord : results) {
                cdcRecord.getSource().getProperties().put(KAFKA_SOURCE_INFO, kafkaSourceInfo);
                collector.collect(cdcRecord);
            }
        }
    }

    /**
     * Decodes one Kafka message into {@link CdcRecord}s and emits them via {@link #collect}.
     * Tombstone messages (null value, e.g. from compacted topics) are skipped with a warning
     * instead of throwing a {@link NullPointerException}. Decode failures are logged with the
     * full payload and rethrown so the job surfaces the error.
     *
     * @param record    the raw Kafka consumer record
     * @param collector Flink output collector
     * @throws IOException if decoding fails
     */
    @Override
    public void deserialize(ConsumerRecord<byte[], byte[]> record, Collector<CdcRecord> collector) throws IOException {
        byte[] value = record.value();
        if (value == null) {
            // Kafka tombstone / delete marker: nothing to decode. The original code would NPE here
            // (and again inside the catch block while building the error log message).
            log.warn("Skipping null-value (tombstone) message: topic [{}] partition [{}] offset [{}]",
                    record.topic(), record.partition(), record.offset());
            return;
        }
        // Decode the payload to a String once; reuse it for the error log instead of re-decoding.
        String payload = new String(value, StandardCharsets.UTF_8);
        try {
            collect(CdcRecord.KafkaSourceInfo.builder().offset(record.offset()).partition(record.partition()).topic(record.topic()).build(),
                    ticdcCanalJsonRecordDecoder.decode(payload), collector);
        } catch (Exception e) {
            log.error("Failed to deserialize topic [{}] partition [{}] offset [{}] message: {}",
                    record.topic(), record.partition(), record.offset(), payload, e);
            throw e;
        }
    }

    /** @return the produced type information, always {@code TypeInformation.of(CdcRecord.class)} */
    @Override
    public TypeInformation<CdcRecord> getProducedType() {
        return TypeInformation.of(CdcRecord.class);
    }
}
