package com.ververica.cdc.guass.source.jdbc;

import com.ververica.cdc.guass.Constants;
import com.ververica.cdc.guass.sink.jdbc.JdbcConnectionOptions;
import com.ververica.cdc.guass.source.kafka.table.PhysicalColumn;
import org.apache.flink.api.connector.source.ReaderOutput;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.TimestampData;
import org.apache.flink.table.types.logical.LogicalType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.*;
import java.sql.Date;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Reads the snapshot (initial full-table) phase of a captured table over a plain JDBC
 * connection, one {@link TableSplit} at a time, emitting rows as Flink internal
 * {@link RowData}.
 *
 * <p>Not thread-safe: a single instance owns one JDBC {@link Connection}.
 */
public class SnapshotReader implements AutoCloseable {

    private static final Logger LOG = LoggerFactory.getLogger(SnapshotReader.class);

    private final JdbcConnectionOptions jdbcOptions;
    private final String tableName;
    private final List<String> splitColumns;  // columns whose value ranges define each split
    // NOTE(review): splitSize is stored but never read in this class — presumably
    // consumed by the split enumerator/planner; confirm before removing.
    private final int splitSize;
    private final List<PhysicalColumn> columns;

    // Name -> column lookup, used to resolve the type of each split-boundary parameter.
    private final Map<String, PhysicalColumn> columnMap;

    private Connection connection;

    /**
     * @param jdbcOptions  connection URL, driver name and connection properties
     * @param tableName    table to read (used verbatim in the generated SQL)
     * @param splitColumns split-key columns; composite keys are supported
     * @param splitSize    desired split size
     * @param columns      physical schema; defines the SELECT column list and RowData order
     */
    public SnapshotReader(JdbcConnectionOptions jdbcOptions, String tableName, List<String> splitColumns,
                          int splitSize, List<PhysicalColumn> columns) {
        this.jdbcOptions = jdbcOptions;
        this.tableName = tableName;
        this.splitColumns = splitColumns;
        this.splitSize = splitSize;
        this.columns = columns;
        // Collectors.toMap throws IllegalStateException on duplicate column names,
        // which would make parameter binding ambiguous anyway — fail fast here.
        this.columnMap = columns.stream().collect(Collectors.toMap(PhysicalColumn::getName, col -> col));
    }

    /** Opens the JDBC connection lazily; calling it again on an open reader is a no-op. */
    public void open() throws SQLException, ClassNotFoundException {
        if (connection == null) {
            connection = createConnection(jdbcOptions);
        }
    }

    private Connection createConnection(JdbcConnectionOptions options) throws SQLException, ClassNotFoundException {
        // Load the driver explicitly when one is configured (pre-JDBC-4 drivers are not
        // discovered via ServiceLoader).
        if (options.getDriverName() != null) {
            Class.forName(options.getDriverName());
        }
        Properties properties = new Properties();
        properties.putAll(options.getProperties());
        return DriverManager.getConnection(options.getDbURL(), properties);
    }

    /**
     * Executes the bounded range query for {@code split} and forwards every row to
     * {@code output}.
     *
     * @throws SQLException             on any JDBC failure
     * @throws IllegalArgumentException if boundary arrays do not match the split columns,
     *                                  or a split column is missing from the schema
     */
    public void readSplit(TableSplit split, ReaderOutput<RowData> output) throws SQLException {
        String sql = buildSplitQuery(split);
        LOG.info("Executing split query: {}", sql);

        try (PreparedStatement stmt = connection.prepareStatement(sql)) {
            List<String> splitCols = split.getSplitColumns();

            if (split.getSplitStart().length != splitCols.size() || split.getSplitEnd().length != splitCols.size()) {
                throw new IllegalArgumentException("splitStart 和 splitEnd 的长度必须与 splitColumns 的数量相同。");
            }

            // Bind [start, end] for each split column and collect the bound values for
            // logging. StringJoiner replaces the old manual deleteCharAt(), which threw
            // StringIndexOutOfBoundsException for a split with zero columns.
            StringJoiner params = new StringJoiner(",");
            for (int i = 0; i < splitCols.size(); i++) {
                String colName = splitCols.get(i);
                PhysicalColumn column = columnMap.get(colName);
                if (column == null) {
                    // Fail fast with context instead of the NPE a lookup miss used to cause.
                    throw new IllegalArgumentException("Unknown split column: " + colName);
                }
                setPreparedStatementParam(stmt, 2 * i + 1, column, split.getSplitStart()[i]);
                setPreparedStatementParam(stmt, 2 * i + 2, column, split.getSplitEnd()[i]);
                params.add(String.valueOf(split.getSplitStart()[i]));
                params.add(String.valueOf(split.getSplitEnd()[i]));
            }

            LOG.info("split params  is: {}", params);

            try (ResultSet rs = stmt.executeQuery()) {
                while (rs.next()) {
                    output.collect(convertToRowData(rs, columns));
                }
            }
        }
    }

    /**
     * Builds the range query for one split:
     * {@code SELECT cols FROM table WHERE c1 BETWEEN ? AND ? [AND ...] ORDER BY c1 ASC, ...}.
     *
     * <p>Table/column identifiers come from the configured schema (not end-user input) and
     * cannot be bound as JDBC parameters, so they are concatenated; all boundary VALUES are
     * bound via {@code ?} placeholders.
     */
    private String buildSplitQuery(TableSplit split) {
        StringBuilder query = new StringBuilder("SELECT ")
                .append(columns.stream().map(PhysicalColumn::getName).collect(Collectors.joining(", ")))
                .append(" FROM ").append(tableName)
                .append(" WHERE ");

        List<String> conditions = new ArrayList<>();
        for (String splitColumn : split.getSplitColumns()) {
            conditions.add(splitColumn + " BETWEEN ? AND ?");
        }
        query.append(String.join(" AND ", conditions));

        // Deterministic ordering over the (possibly composite) split key.
        query.append(" ORDER BY ")
                .append(split.getSplitColumns().stream()
                        .map(col -> col + " ASC")
                        .collect(Collectors.joining(", ")));

        return query.toString();
    }

    /** Binds one split-boundary value at {@code parameterIndex}, dispatching on the column's Flink logical type. */
    private void setPreparedStatementParam(PreparedStatement stmt, int parameterIndex, PhysicalColumn column, Object value) throws SQLException {
        LogicalType type = column.getDataType().getLogicalType();
        if (value == null) {
            // setNull needs the JDBC type code for open, unbounded splits.
            stmt.setNull(parameterIndex, getSqlType(type));
            return;
        }

        switch (type.getTypeRoot()) {
            case INTEGER:
                stmt.setInt(parameterIndex, ((Number) value).intValue());
                break;
            case BIGINT:
                stmt.setLong(parameterIndex, ((Number) value).longValue());
                break;
            case FLOAT:
                stmt.setFloat(parameterIndex, ((Number) value).floatValue());
                break;
            case DOUBLE:
                stmt.setDouble(parameterIndex, ((Number) value).doubleValue());
                break;
            case VARCHAR:
                stmt.setString(parameterIndex, (String) value);
                break;
            case BOOLEAN:
                stmt.setBoolean(parameterIndex, (Boolean) value);
                break;
            case DATE:
                stmt.setDate(parameterIndex, (Date) value);
                break;
            case TIMESTAMP_WITHOUT_TIME_ZONE:
                stmt.setTimestamp(parameterIndex, (Timestamp) value);
                break;
            default:
                throw new UnsupportedOperationException("Unsupported type: " + type);
        }
    }

    /** Maps a Flink {@link LogicalType} to the corresponding {@link Types} code for {@code setNull}. */
    private int getSqlType(LogicalType type) {
        switch (type.getTypeRoot()) {
            case INTEGER:
                return Types.INTEGER;
            case BIGINT:
                return Types.BIGINT;
            case FLOAT:
                return Types.FLOAT;
            case DOUBLE:
                return Types.DOUBLE;
            case VARCHAR:
                return Types.VARCHAR;
            case BOOLEAN:
                return Types.BOOLEAN;
            case DATE:
                return Types.DATE;
            case TIMESTAMP_WITHOUT_TIME_ZONE:
                return Types.TIMESTAMP;
            default:
                throw new UnsupportedOperationException("Unsupported type: " + type);
        }
    }

    /**
     * Converts the current ResultSet row into a {@link GenericRowData}.
     *
     * <p>The row must contain a {@code px_gauss_marks} bookkeeping column; its value is
     * overwritten with a per-column presence string ("1" = value present, "2" = SQL NULL,
     * one character per column in schema order).
     *
     * @throws RuntimeException if no {@code px_gauss_marks} column exists in the schema
     */
    private static RowData convertToRowData(ResultSet rs, List<PhysicalColumn> columns) throws SQLException {
        int arity = columns.size();
        GenericRowData row = new GenericRowData(arity);

        StringBuilder marks = new StringBuilder(arity);
        int marksIndex = -1;

        for (int i = 0; i < arity; i++) {
            PhysicalColumn column = columns.get(i);
            // Uppercase for a case-insensitive match against the marks column name.
            String fieldName = column.getName().toUpperCase();

            // Remember where the px_gauss_marks column sits; it is overwritten below.
            if (fieldName.contains(Constants.PX_GAUSS_MARKS.toUpperCase())) {
                marksIndex = i;
            }

            // Convert the JDBC value to Flink's internal representation (null for SQL NULL).
            Object convertedValue = getField(rs, i + 1, column.getDataType().getLogicalType());

            marks.append(convertedValue == null ? "2" : "1");

            row.setField(i, convertedValue);
        }

        if (marksIndex == -1) {
            throw new RuntimeException("No px_gauss_marks found");
        }

        // Replace whatever the marks column held with the computed presence string.
        row.setField(marksIndex, StringData.fromString(marks.toString()));

        return row;
    }

    /**
     * Reads column {@code index} from the result set and converts it to Flink's internal
     * representation for {@code logicalType}; returns {@code null} for SQL NULL.
     */
    private static Object getField(ResultSet rs, int index, LogicalType logicalType) throws SQLException {
        switch (logicalType.getTypeRoot()) {
            case INTEGER: {
                // Primitive getters return 0/false for SQL NULL, so wasNull() must be
                // consulted — previously NULLs were read as 0 and marked as present ("1").
                int v = rs.getInt(index);
                return rs.wasNull() ? null : v;
            }
            case BIGINT: {
                long v = rs.getLong(index);
                return rs.wasNull() ? null : v;
            }
            case FLOAT: {
                float v = rs.getFloat(index);
                return rs.wasNull() ? null : v;
            }
            case DOUBLE: {
                double v = rs.getDouble(index);
                return rs.wasNull() ? null : v;
            }
            case VARCHAR: {
                String str = rs.getString(index);
                return str != null ? StringData.fromString(str) : null;
            }
            case BOOLEAN: {
                boolean v = rs.getBoolean(index);
                return rs.wasNull() ? null : v;
            }
            case DATE: {
                // Flink DATE = days since epoch. Convert directly; the old
                // toString()/re-parse round-trip also NPE'd on SQL NULL.
                Date date = rs.getDate(index);
                return date == null ? null : (int) date.toLocalDate().toEpochDay();
            }
            case TIMESTAMP_WITHOUT_TIME_ZONE: {
                // BUG FIX: the old code parsed Timestamp.toString() ("yyyy-MM-dd HH:mm:ss...")
                // with DateTimeFormatter.ISO_TIME, which can never succeed, so every timestamp
                // silently became null. Flink's internal representation for
                // TIMESTAMP_WITHOUT_TIME_ZONE is TimestampData.
                Timestamp ts = rs.getTimestamp(index);
                return ts == null ? null : TimestampData.fromTimestamp(ts);
            }
            default:
                throw new UnsupportedOperationException("Unsupported type: " + logicalType);
        }
    }

    /** Closes the JDBC connection; safe to call when never opened or already closed. */
    @Override
    public void close() throws Exception {
        if (connection != null) {
            connection.close();
            connection = null;
        }
    }
}
