package com.reins.spark.service;

import com.github.shyiko.mysql.binlog.BinaryLogFileReader;
import com.github.shyiko.mysql.binlog.event.*;
import com.github.shyiko.mysql.binlog.event.deserialization.EventDeserializer;
import jakarta.annotation.PostConstruct;
import lombok.extern.slf4j.Slf4j;
import org.apache.spark.sql.*;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.ConnectionCallback;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.sql.*;
import java.util.*;
import java.util.Date;

@Service
@Slf4j
public class BinlogService {
    // Net-operation markers stored in the synthetic "_binlog_operation_" column
    // appended to every table schema.
    public static final String INSERT_OPERATION = "INSERT";
    public static final String DELETE_OPERATION = "DELETE";
    public static final String UPDATE_OPERATION = "UPDATE";
    public static final String NONE_OPERATION = "NONE";

    @Autowired
    SparkSession sparkSession;

    @Autowired
    JdbcTemplate jdbcTemplate;

    /**
     * Folds a newly observed binlog operation into the operation already recorded
     * for the same primary key, yielding the net effect of the pair:
     * INSERT followed by DELETE cancels out (NONE); DELETE followed by INSERT
     * becomes INSERT (re-insert); UPDATE followed by DELETE becomes DELETE;
     * any other previous state just takes the new operation.
     *
     * @param prev operation recorded so far (one of the *_OPERATION constants)
     * @param now  operation just read from the binlog
     * @return the merged net operation
     */
    public static String mergeOperation(String prev, String now) {
        return switch (prev) {
            case INSERT_OPERATION -> Objects.equals(now, DELETE_OPERATION) ? NONE_OPERATION : prev;
            case DELETE_OPERATION -> Objects.equals(now, INSERT_OPERATION) ? now : prev;
            case UPDATE_OPERATION -> Objects.equals(now, DELETE_OPERATION) ? now : prev;
            default -> now;
        };
    }

    /**
     * Maps a {@link java.sql.Types} constant to the closest Spark SQL
     * {@link DataType}. Any JDBC type without an explicit mapping falls back
     * to {@code StringType}.
     */
    private static DataType convertToSparkType(int sqlType) {
        return switch (sqlType) {
            case Types.BOOLEAN -> DataTypes.BooleanType;
            case Types.TINYINT -> DataTypes.ByteType;
            case Types.SMALLINT -> DataTypes.ShortType;
            case Types.INTEGER -> DataTypes.IntegerType;
            case Types.BIGINT -> DataTypes.LongType;
            case Types.FLOAT -> DataTypes.FloatType;
            case Types.REAL, Types.DOUBLE -> DataTypes.DoubleType;
            case Types.NUMERIC, Types.DECIMAL -> DataTypes.createDecimalType();
            case Types.DATE -> DataTypes.DateType;
            case Types.TIME, Types.TIMESTAMP -> DataTypes.TimestampType;
            case Types.BINARY, Types.VARBINARY, Types.LONGVARBINARY -> DataTypes.BinaryType;
            default -> DataTypes.StringType;
        };
    }

    /**
     * Lists all base-table names visible through the given metadata
     * (views, system tables etc. are excluded by the {@code "TABLE"} filter).
     *
     * @throws RuntimeException wrapping any {@link SQLException}
     */
    private List<String> getTableNames(DatabaseMetaData databaseMetaData) {
        List<String> tableNames = new ArrayList<>();
        try (ResultSet resultSet = databaseMetaData.getTables(null, null, null, new String[]{"TABLE"})) {
            while (resultSet.next()) {
                String tableName = resultSet.getString("TABLE_NAME");
                tableNames.add(tableName);
            }
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
        return tableNames;
    }

    /**
     * Builds the Spark schema for one table from JDBC column metadata and
     * appends the synthetic "_binlog_operation_" string column that records the
     * net operation computed for each row.
     *
     * @throws RuntimeException wrapping any {@link SQLException}
     */
    private StructType getSchema(DatabaseMetaData databaseMetaData, String tableName) {
        List<StructField> fields = new ArrayList<>();
        try (ResultSet columns = databaseMetaData.getColumns(null, null, tableName, null)) {
            while (columns.next()) {
                String columnName = columns.getString("COLUMN_NAME");
                // DATA_TYPE is an int column holding a java.sql.Types constant.
                int dataTypeIndex = columns.getInt("DATA_TYPE");
                DataType dataType = convertToSparkType(dataTypeIndex);
                // Per DatabaseMetaData.getColumns, IS_NULLABLE is "YES"/"NO"/""
                // (not "true"/"false"), so Boolean.parseBoolean would always yield false.
                boolean isNullable = "YES".equalsIgnoreCase(columns.getString("IS_NULLABLE"));
                fields.add(DataTypes.createStructField(columnName, dataType, isNullable));
            }
            fields.add(DataTypes.createStructField("_binlog_operation_", DataTypes.StringType, false));
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
        return DataTypes.createStructType(fields);
    }

    /**
     * Builds a map from table name to its Spark schema for every base table
     * reachable through the given metadata.
     */
    private Map<String, StructType> getSchemaMap(DatabaseMetaData databaseMetaData) {
        List<String> tableNames = getTableNames(databaseMetaData);
        log.info("Table names: {}", tableNames);
        Map<String, StructType> schemaMap = new HashMap<>();
        for (String tableName : tableNames) {
            StructType schema = getSchema(databaseMetaData, tableName);
            schemaMap.put(tableName, schema);
            log.info("Table {} schema: {}", tableName, schema);
        }
        return schemaMap;
    }

    /**
     * Adapts one raw binlog cell value to the type Spark expects for its column:
     * byte[] destined for a string column is decoded as UTF-8 (MySQL text columns
     * arrive as raw bytes — assumes utf8-encoded data, TODO confirm against the
     * server charset), and java.util.Date destined for a timestamp column is
     * converted to java.sql.Timestamp. Anything else passes through unchanged.
     */
    private Object processValue(Serializable value, DataType columnType) {
        if (value instanceof byte[] bytes && columnType == DataTypes.StringType) {
            // Explicit charset: new String(bytes) would use the platform default.
            return new String(bytes, StandardCharsets.UTF_8);
        } else if (value instanceof Date date && columnType == DataTypes.TimestampType) {
            return new Timestamp(date.getTime());
        }
        return value;
    }

    /**
     * Converts one raw binlog row into a mutable value list matching the table
     * schema, with the operation marker appended as the last element.
     */
    private List<Object> processRow(Serializable[] row, StructField[] fields, String operation) {
        List<Object> processed = new ArrayList<>(row.length + 1);
        for (int i = 0; i < row.length; i++) {
            Serializable value = row[i];
            processed.add(processValue(value, fields[i].dataType()));
        }
        processed.add(operation);
        return processed;
    }

    /**
     * Merges a newly read row into the row already accumulated for the same key:
     * the net operation is recomputed via {@link #mergeOperation}, and the column
     * values are overwritten only when the merged result still carries data
     * (UPDATE or INSERT); for DELETE/NONE the stale values are irrelevant.
     */
    private void mergeRow(List<Object> oldRow, Serializable[] row, StructField[] fields, String operation) {
        String oldOperation = (String) oldRow.get(oldRow.size() - 1);
        String mergedOperation = mergeOperation(oldOperation, operation);
        log.info("Merged operation: {} + {} -> {}", oldOperation, operation, mergedOperation);
        if (Objects.equals(mergedOperation, UPDATE_OPERATION) || Objects.equals(mergedOperation, INSERT_OPERATION)) {
            for (int i = 0; i < oldRow.size() - 1; i++) {
                Serializable value = row[i];
                oldRow.set(i, processValue(value, fields[i].dataType()));
            }
        }
        oldRow.set(oldRow.size() - 1, mergedOperation);
    }

    /**
     * Applies one batch of raw binlog rows to the accumulated state of a table.
     * Rows are keyed by their first column (assumes the primary key is the first
     * column — TODO confirm for composite/absent keys): a known key is merged
     * into the existing entry, an unknown key is appended and indexed.
     */
    private void operateRows(List<List<Object>> rows, List<Serializable[]> rawRows, StructField[] fields, Map<Object, Integer> indexMap, String operation) {
        if (rows == null) {
            return;
        }
        for (Serializable[] row : rawRows) {
            if (row.length == 0) {
                continue;
            }
            Object primaryKey = row[0];
            Integer oldRecordIndex = indexMap.get(primaryKey);
            if (oldRecordIndex != null) {
                List<Object> oldRow = rows.get(oldRecordIndex);
                mergeRow(oldRow, row, fields, operation);
            } else {
                indexMap.put(primaryKey, rows.size());
                List<Object> processedRow = processRow(row, fields, operation);
                rows.add(processedRow);
            }
        }
    }

    /** Applies a batch of binlog WRITE rows as INSERT operations. */
    private void insertRows(List<List<Object>> rows, List<Serializable[]> rawRows, StructField[] fields, Map<Object, Integer> indexMap) {
        operateRows(rows, rawRows, fields, indexMap, INSERT_OPERATION);
    }

    /** Applies a batch of binlog DELETE rows as DELETE operations. */
    private void deleteRows(List<List<Object>> rows, List<Serializable[]> rawRows, StructField[] fields, Map<Object, Integer> indexMap) {
        operateRows(rows, rawRows, fields, indexMap, DELETE_OPERATION);
    }

    /** Applies a batch of binlog UPDATE rows (after-images) as UPDATE operations. */
    private void updateRows(List<List<Object>> rows, List<Serializable[]> rawRows, StructField[] fields, Map<Object, Integer> indexMap) {
        operateRows(rows, rawRows, fields, indexMap, UPDATE_OPERATION);
    }

    /**
     * Reads the whole binlog stream and nets out, per table, the final state of
     * every touched row. A TABLE_MAP event selects the current table; subsequent
     * write/update/delete row events are folded into that table's accumulator.
     * Row events arriving before any TABLE_MAP, or for tables without a known
     * schema, are skipped.
     *
     * @param schemaMap table name -> Spark schema (from {@link #getSchemaMap})
     * @param reader    binlog reader; always closed before returning, even on error
     * @return table name -> accumulated rows (last element of each row is the net operation)
     * @throws RuntimeException wrapping any {@link IOException} from the reader
     */
    private Map<String, List<List<Object>>> parseBinlog(Map<String, StructType> schemaMap, BinaryLogFileReader reader) {
        String currentTable;
        StructType currentSchema = null;
        Map<Object, Integer> currentIndexMap = null;
        List<List<Object>> currentRows = null;
        Map<String, List<List<Object>>> rowsMap = new HashMap<>();
        Map<String, Map<Object, Integer>> tableIndexMap = new HashMap<>();
        // try-with-resources: the original closed the reader only on the success
        // path, leaking it whenever readEvent threw.
        try (reader) {
            for (Event event; (event = reader.readEvent()) != null; ) {
                EventData data = event.getData();
                if (data instanceof DeleteRowsEventData deleteRows) {
                    if (currentRows == null || currentSchema == null || currentIndexMap == null) {
                        continue;
                    }
                    deleteRows(currentRows, deleteRows.getRows(), currentSchema.fields(), currentIndexMap);
                } else if (data instanceof UpdateRowsEventData updateRows) {
                    if (currentRows == null || currentSchema == null || currentIndexMap == null) {
                        continue;
                    }
                    // Each update entry is (before-image, after-image); keep the after-image.
                    List<Serializable[]> rows = updateRows.getRows().stream().map(Map.Entry::getValue).toList();
                    updateRows(currentRows, rows, currentSchema.fields(), currentIndexMap);
                } else if (data instanceof WriteRowsEventData writeRows) {
                    if (currentRows == null || currentSchema == null || currentIndexMap == null) {
                        continue;
                    }
                    insertRows(currentRows, writeRows.getRows(), currentSchema.fields(), currentIndexMap);
                } else if (data instanceof TableMapEventData tableMap) {
                    currentTable = tableMap.getTable();
                    currentSchema = schemaMap.get(currentTable);
                    if (!rowsMap.containsKey(currentTable)) {
                        log.info("Current table: {}", currentTable);
                    }
                    rowsMap.computeIfAbsent(currentTable, k -> new ArrayList<>());
                    tableIndexMap.computeIfAbsent(currentTable, k -> new HashMap<>());
                    currentRows = rowsMap.get(currentTable);
                    currentIndexMap = tableIndexMap.get(currentTable);
                }
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return rowsMap;
    }

    /**
     * Demo entry point run at startup: reads the bundled binlog file, nets out
     * per-row operations for every table, builds one Spark DataFrame per table
     * and prints it. NOTE(review): heavy work in @PostConstruct blocks
     * application startup — consider an ApplicationRunner instead.
     */
    @PostConstruct
    public void test() {
        // When true, rows whose net operation is NONE (insert later cancelled
        // by a delete) are kept in the output DataFrames.
        boolean keepNone = false;

        InputStream inputStream = BinlogService.class
                .getClassLoader()
                .getResourceAsStream("binlog.000003");
        if (inputStream == null) {
            log.error("不存在该 Binlog 文件");
            return;
        }
        EventDeserializer eventDeserializer = new EventDeserializer();
        DatabaseMetaData databaseMetaData = jdbcTemplate.execute((ConnectionCallback<DatabaseMetaData>) con -> {
            try {
                return con.getMetaData();
            } catch (SQLException e) {
                log.error(e.getMessage());
                return null;
            }
        });
        if (databaseMetaData == null) {
            log.warn("Failed to load dataset metadata, exit");
            closeQuietly(inputStream); // don't leak the binlog stream on early exit
            return;
        }

        Map<String, StructType> schemaMap = getSchemaMap(databaseMetaData);
        BinaryLogFileReader reader;
        try {
            reader = new BinaryLogFileReader(inputStream, eventDeserializer);
        } catch (IOException e) {
            closeQuietly(inputStream); // reader never took ownership of the stream
            log.error(e.getMessage());
            throw new RuntimeException(e);
        }

        // parseBinlog closes the reader (and with it the underlying stream).
        Map<String, List<List<Object>>> rowsMap = parseBinlog(schemaMap, reader);
        Map<String, Dataset<Row>> dataFrameMap = new HashMap<>();
        for (Map.Entry<String, List<List<Object>>> entry : rowsMap.entrySet()) {
            String tableName = entry.getKey();
            StructType schema = schemaMap.get(tableName);
            List<List<Object>> rawRows = entry.getValue();
            List<Row> rows = new ArrayList<>();
            for (List<Object> rawRow : rawRows) {
                String operation = (String) rawRow.get(rawRow.size() - 1);
                if (!keepNone && Objects.equals(operation, NONE_OPERATION)) {
                    continue;
                }
                rows.add(RowFactory.create(rawRow.toArray()));
            }
            if (schema == null || rows.isEmpty()) {
                continue;
            }
            log.info("creating data frame for {}, schema: {}", tableName, schema);
            Dataset<Row> dataFrame = sparkSession.createDataFrame(rows, schema);
            dataFrameMap.put(tableName, dataFrame);
            log.info("create done.");
        }

        for (Map.Entry<String, Dataset<Row>> entry : dataFrameMap.entrySet()) {
            String tableName = entry.getKey();
            Dataset<Row> dataFrame = entry.getValue();
            log.info("Showing data frame of {}:", tableName);
            // count() returns long; clamp instead of a blind (int) cast that
            // could overflow for very large frames.
            dataFrame.show((int) Math.min(dataFrame.count(), Integer.MAX_VALUE));
        }
    }

    /** Closes a stream, logging (not propagating) any failure — cleanup is best-effort. */
    private static void closeQuietly(InputStream in) {
        try {
            in.close();
        } catch (IOException e) {
            log.warn("Failed to close binlog stream: {}", e.getMessage());
        }
    }
}
