package com.ververica.cdc.guass.source.kafka.table;

import com.ververica.cdc.guass.Constants;
import com.ververica.cdc.guass.source.kafka.GaussKafkaSourceFunction;
import com.ververica.cdc.guass.source.kafka.data.ChangeEvent;

import org.apache.flink.api.common.state.*;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.FunctionInitializationContext;
import org.apache.flink.runtime.state.FunctionSnapshotContext;
import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.table.data.*;
import org.apache.flink.table.types.logical.*;
import org.apache.flink.types.RowKind;
import org.apache.flink.util.CollectionUtil;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.charset.StandardCharsets;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;

/**
 * Adapts a {@link GaussKafkaSourceFunction} (which emits raw {@link ChangeEvent}s read from
 * Kafka) into a Flink table source that emits {@link RowData} matching the given physical
 * schema. Checkpointing calls are delegated straight through to the wrapped source.
 *
 * <p>NOTE(review): updates ("U") are emitted as a single {@link RowKind#UPDATE_AFTER} row;
 * the upstream event apparently carries no pre-update image, so no UPDATE_BEFORE row can be
 * produced — confirm downstream operators tolerate this.
 */
public class ChangeEventToRowDataSourceFunction extends RichSourceFunction<RowData> implements CheckpointedFunction, CheckpointListener {

    private static final Logger LOG = LoggerFactory.getLogger(ChangeEventToRowDataSourceFunction.class);

    /**
     * Patterns accepted for TIMESTAMP_WITHOUT_TIME_ZONE values, ordered by fractional-second
     * precision. {@link DateTimeFormatter} is immutable and thread-safe, so the array is
     * built once instead of on every conversion call.
     */
    private static final DateTimeFormatter[] TIMESTAMP_FORMATTERS = {
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"),
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.S"),
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SS"),
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS"),
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSS"),
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSS"),
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSS")
    };

    /** Wrapped source that reads raw change events from Kafka. */
    private final GaussKafkaSourceFunction sourceFunction;

    /** Physical schema of the target table; drives field order and type conversion. */
    private final List<PhysicalColumn> physicalColumns;

    /** Context used to emit converted rows to Flink; assigned when {@link #run} starts. */
    private transient SourceContext<RowData> sourceContext;

    /**
     * @param sourceFunction  delegate Kafka source producing {@link ChangeEvent}s
     * @param physicalColumns physical columns of the target table, in field order
     */
    public ChangeEventToRowDataSourceFunction(GaussKafkaSourceFunction sourceFunction, List<PhysicalColumn> physicalColumns) {
        this.sourceFunction = sourceFunction;
        this.physicalColumns = physicalColumns;
    }

    @Override
    public void open(Configuration parameters) throws Exception {
        // Open the delegate first so it is fully initialized before run() is invoked.
        sourceFunction.open(parameters);
    }

    @Override
    public void run(SourceContext<RowData> ctx) throws Exception {
        startKafkaReader(ctx);
    }

    /** Delegates state snapshotting to the wrapped source function. */
    @Override
    public void snapshotState(FunctionSnapshotContext context) throws Exception {
        sourceFunction.snapshotState(context);
    }

    /** Delegates state restoration to the wrapped source function. */
    @Override
    public void initializeState(FunctionInitializationContext context) throws Exception {
        sourceFunction.initializeState(context);
    }

    /** Delegates checkpoint-complete notifications to the wrapped source function. */
    @Override
    public void notifyCheckpointComplete(long checkpointId) throws Exception {
        sourceFunction.notifyCheckpointComplete(checkpointId);
    }

    /**
     * Runs the wrapped source with a bridging {@link SourceContext} that converts each
     * {@link ChangeEvent} into a {@link RowData} before forwarding it. Emission happens
     * under the checkpoint lock, as required by the SourceFunction contract.
     */
    private void startKafkaReader(SourceContext<RowData> ctx) throws Exception {

        this.sourceContext = ctx;
        sourceFunction.run(new SourceContext<ChangeEvent>() {

            @Override
            public void collect(ChangeEvent event) {
                RowData rowData = convertChangeEventToRowData(event);
                // convertChangeEventToRowData returns null for unknown op types;
                // never forward a null record downstream.
                if (rowData != null) {
                    synchronized (sourceContext.getCheckpointLock()) {
                        sourceContext.collect(rowData);
                    }
                }
            }

            @Override
            public void collectWithTimestamp(ChangeEvent changeEvent, long timestamp) {
                RowData rowData = convertChangeEventToRowData(changeEvent);
                if (rowData != null) {
                    synchronized (sourceContext.getCheckpointLock()) {
                        sourceContext.collectWithTimestamp(rowData, timestamp);
                    }
                }
            }

            @Override
            public void emitWatermark(Watermark watermark) {
                synchronized (sourceContext.getCheckpointLock()) {
                    sourceContext.emitWatermark(watermark);
                }
            }

            @Override
            public void markAsTemporarilyIdle() {
                sourceContext.markAsTemporarilyIdle();
            }

            @Override
            public Object getCheckpointLock() {
                return sourceContext.getCheckpointLock();
            }

            @Override
            public void close() {
                sourceContext.close();
            }
        });
    }

    @Override
    public void cancel() {
        sourceFunction.cancel();
    }

    /**
     * Converts a change event into a {@link RowData} with the matching {@link RowKind}.
     * Key fields (if any) and data fields are merged into one map before conversion.
     *
     * @return the converted row, or {@code null} for an unsupported operation type
     */
    private RowData convertChangeEventToRowData(ChangeEvent event) {
        String opType = event.getOpType();
        HashMap<String, Object> dataMap = new HashMap<>();
        if (!CollectionUtil.isNullOrEmpty(event.getKeys())) {
            dataMap.putAll(event.getKeys());
        }
        dataMap.putAll(event.getData());

        switch (opType) {
            case "I":
                return createRowData(RowKind.INSERT, dataMap);
            case "U":
                // Only the post-update image is available, so a single UPDATE_AFTER is emitted.
                return createRowData(RowKind.UPDATE_AFTER, dataMap);
            case "D":
                return createRowData(RowKind.DELETE, dataMap);
            default:
                // Unknown operation type: log and signal the caller to skip this event.
                LOG.warn("Unsupported opType '{}', event skipped", opType);
                return null;
        }
    }

    /**
     * Builds a {@link GenericRowData} from the event fields, positioned by the physical
     * schema. A per-column "marks" string is written into the px_gauss_marks column:
     * '0' = key absent from the event, '1' = present with a value, '2' = present but null.
     *
     * @throws IllegalStateException if the schema contains no px_gauss_marks column
     */
    private RowData createRowData(RowKind rowKind, Map<String, Object> dataMap) {
        // Without a data map there is nothing to convert.
        if (dataMap == null) {
            return null;
        }

        int numDataFields = physicalColumns.size();
        GenericRowData rowData = new GenericRowData(rowKind, numDataFields);

        // No cross-thread sharing here, so StringBuilder (not StringBuffer) suffices.
        StringBuilder marks = new StringBuilder(numDataFields);
        int marksIndex = -1;

        // Normalize event keys to upper case once so lookups match column names.
        // Locale.ROOT avoids locale-dependent casing (e.g. the Turkish dotless i).
        Map<String, Object> normalized = new HashMap<>();
        for (Map.Entry<String, Object> entry : dataMap.entrySet()) {
            normalized.put(entry.getKey().toUpperCase(Locale.ROOT), entry.getValue());
        }

        String marksColumnName = Constants.PX_GAUSS_MARKS.toUpperCase(Locale.ROOT);

        for (int i = 0; i < numDataFields; i++) {

            PhysicalColumn physicalColumn = physicalColumns.get(i);
            String fieldName = physicalColumn.getName().toUpperCase(Locale.ROOT);
            if (fieldName.contains(marksColumnName)) {
                // Remember where the marks column sits; its value is written after the loop.
                marksIndex = i;
            }

            LogicalType fieldType = physicalColumn.getDataType().getLogicalType();

            Object convertedValue = null;
            if (normalized.containsKey(fieldName)) {
                Object fieldValue = normalized.get(fieldName);
                // Convert to Flink's internal data representation.
                convertedValue = convertToDataType(fieldValue, fieldType);
                // '1' = key present with a value, '2' = key present but value was null.
                marks.append(convertedValue == null ? "2" : "1");
            } else {
                // '0' = the event did not carry this column at all.
                marks.append("0");
            }
            rowData.setField(i, convertedValue);
        }
        if (marksIndex == -1) {
            throw new IllegalStateException("No px_gauss_marks found");
        }
        rowData.setField(marksIndex, StringData.fromString(marks.toString()));

        // Per-record logging is expensive on the hot path; keep it at DEBUG.
        LOG.debug("source rowdata is {}", rowData);

        return rowData;
    }

    /**
     * Converts a plain Java value into Flink's internal representation for the given
     * logical type (e.g. String -> StringData, LocalDateTime -> TimestampData).
     *
     * @param value       raw value from the change event; may be null
     * @param logicalType target Flink logical type
     * @return the converted value, or {@code null} when the input is null or unparseable
     * @throws UnsupportedOperationException for logical types with no conversion rule
     */
    private Object convertToDataType(Object value, LogicalType logicalType) {
        if (value == null) {
            return null;
        }
        switch (logicalType.getTypeRoot()) {
            case INTEGER:
                return ((Number) value).intValue();
            case BIGINT:
                return ((Number) value).longValue();
            case SMALLINT:
                return ((Number) value).shortValue();
            case TINYINT:
                return ((Number) value).byteValue();
            case DOUBLE:
                return ((Number) value).doubleValue();
            case FLOAT:
                return ((Number) value).floatValue();
            case DECIMAL:
                DecimalType decimalType = (DecimalType) logicalType;
                return DecimalData.fromBigDecimal(new java.math.BigDecimal(value.toString()), decimalType.getPrecision(), decimalType.getScale());
            case VARCHAR:
            case CHAR:
                return StringData.fromString(value.toString());
            case BOOLEAN:
                if (value instanceof Boolean) {
                    return (Boolean) value;
                } else {
                    return Boolean.parseBoolean(value.toString());
                }
            case DATE:
                try {
                    java.time.LocalDate date = java.time.LocalDate.parse(value.toString(), DateTimeFormatter.ISO_DATE);
                    // toEpochDay() is the day count since 1970-01-01, Flink's internal DATE encoding.
                    return (int) date.toEpochDay();
                } catch (java.time.format.DateTimeParseException ignored) {
                    // Unparseable date: treated as null (best-effort conversion).
                    return null;
                }
            case TIME_WITHOUT_TIME_ZONE:
                try {
                    java.time.LocalTime time = java.time.LocalTime.parse(value.toString(), DateTimeFormatter.ISO_TIME);
                    // Flink's internal TIME encoding is milliseconds of the day.
                    return (int) (time.toNanoOfDay() / 1_000_000);
                } catch (java.time.format.DateTimeParseException ignored) {
                    return null;
                }
            case TIMESTAMP_WITHOUT_TIME_ZONE:
                // Try the known patterns in increasing fractional-second precision.
                for (DateTimeFormatter formatter : TIMESTAMP_FORMATTERS) {
                    try {
                        java.time.LocalDateTime dateTime = java.time.LocalDateTime.parse(value.toString(), formatter);
                        return TimestampData.fromLocalDateTime(dateTime);
                    } catch (java.time.format.DateTimeParseException ignored) {
                        // Fall through to the next pattern.
                    }
                }

                // Last resort: the ISO-8601 date-time format.
                try {
                    java.time.LocalDateTime dateTime = java.time.LocalDateTime.parse(value.toString(), DateTimeFormatter.ISO_DATE_TIME);
                    return TimestampData.fromLocalDateTime(dateTime);
                } catch (java.time.format.DateTimeParseException ignored) {
                    return null;
                }
            case BINARY:
            case VARBINARY:
                if (value instanceof byte[]) {
                    return value;
                } else if (value instanceof String) {
                    // Explicit charset: never rely on the platform default.
                    return ((String) value).getBytes(StandardCharsets.UTF_8);
                } else {
                    return null;
                }
            default:
                throw new UnsupportedOperationException("Unsupported data type: " + logicalType);
        }
    }

}