package com.leon.datalink.core.persistence.plugin;

import akka.persistence.journal.japi.AsyncWriteJournal;
import akka.persistence.AtomicWrite;
import akka.persistence.PersistentRepr;
import com.leon.datalink.core.evn.EnvUtil;
import com.alibaba.druid.pool.DruidDataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.collection.immutable.Seq;
import scala.concurrent.Future;
import akka.dispatch.Futures;

import java.io.*;
import java.sql.*;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Consumer;

/**
 * PostgreSQL-backed Akka persistence Journal plugin.
 * Persists journal events to a PostgreSQL database via a Druid connection pool.
 *
 * @author DataLink Team
 * @version 1.0.0
 */
public class PostgresJournalPlugin extends AsyncWriteJournal {

    private static final Logger logger = LoggerFactory.getLogger(PostgresJournalPlugin.class);

    // PostgreSQL connection pool; created in preStart(), closed in postStop().
    private DruidDataSource dataSource;

    // Name of the journal table.
    private static final String JOURNAL_TABLE = "datalink_journal";

    // In-memory index of persisted (persistenceId, sequenceNr) entries.
    // NOTE(review): entries are only ever added and removed, never read back
    // by any visible code path — consider removing the index entirely if no
    // lookup is introduced elsewhere.
    private final Map<String, Long> sequenceIndex = new ConcurrentHashMap<>();

    public PostgresJournalPlugin() {
        // Instantiated reflectively by Akka persistence; no-arg constructor required.
    }

    /**
     * Actor lifecycle hook: initializes the connection pool, creates the
     * journal table if needed, and preloads the sequence index.
     *
     * @throws Exception if the data source cannot be initialized or the
     *                   table/index DDL fails
     */
    @Override
    public void preStart() throws Exception {
        super.preStart();

        // Initialize the PostgreSQL data source.
        initializeDataSource();

        // Create the table structure if it does not exist yet.
        createTablesIfNotExists();

        // Load the existing sequence-number index.
        loadSequenceIndex();

        logger.info("PostgreSQL Journal Plugin initialized successfully");
    }

    /**
     * Initializes the Druid data source from system properties
     * ({@code datalink.persistence.postgres.*}), falling back to local
     * defaults when a property is absent.
     *
     * @throws Exception if the connection pool fails to initialize
     */
    private void initializeDataSource() throws Exception {
        String host = System.getProperty("datalink.persistence.postgres.host", "localhost");
        String port = System.getProperty("datalink.persistence.postgres.port", "5432");
        String database = System.getProperty("datalink.persistence.postgres.database", "datalink");
        String username = System.getProperty("datalink.persistence.postgres.username", "datalink");
        String password = System.getProperty("datalink.persistence.postgres.password", "datalink");

        String url = String.format("jdbc:postgresql://%s:%s/%s?useUnicode=true&characterEncoding=utf-8&useSSL=false",
                host, port, database);

        dataSource = new DruidDataSource();
        dataSource.setDriverClassName("org.postgresql.Driver");
        dataSource.setUrl(url);
        dataSource.setUsername(username);
        dataSource.setPassword(password);
        dataSource.setInitialSize(5);
        dataSource.setMinIdle(2);
        dataSource.setMaxActive(20);
        dataSource.setValidationQuery("SELECT 1");
        dataSource.setTestWhileIdle(true);
        dataSource.setTestOnBorrow(false);
        dataSource.setTestOnReturn(false);

        // Eagerly initialize the pool so configuration errors surface at startup.
        dataSource.init();

        logger.info("PostgreSQL DataSource initialized: {}", url);
    }

    /**
     * Creates the journal table and its (persistence_id, sequence_nr) index
     * if they do not already exist. Idempotent via IF NOT EXISTS.
     *
     * @throws Exception if the DDL statements fail
     */
    private void createTablesIfNotExists() throws Exception {
        String createTableSQL = String.format(
            "CREATE TABLE IF NOT EXISTS %s (" +
            "id BIGSERIAL PRIMARY KEY," +
            "persistence_id VARCHAR(255) NOT NULL," +
            "sequence_nr BIGINT NOT NULL," +
            "manifest VARCHAR(255) NOT NULL," +
            "writer_uuid VARCHAR(255) NOT NULL," +
            "payload BYTEA NOT NULL," +
            "created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP," +
            "UNIQUE(persistence_id, sequence_nr)" +
            ")", JOURNAL_TABLE);

        String createIndexSQL = String.format(
            "CREATE INDEX IF NOT EXISTS idx_%s_persistence_id_sequence " +
            "ON %s (persistence_id, sequence_nr)", JOURNAL_TABLE, JOURNAL_TABLE);

        try (Connection conn = dataSource.getConnection();
             Statement stmt = conn.createStatement()) {

            stmt.execute(createTableSQL);
            stmt.execute(createIndexSQL);

            logger.info("Journal table {} created/verified", JOURNAL_TABLE);
        }
    }

    /**
     * Writes a batch of {@link AtomicWrite}s. Each AtomicWrite is persisted
     * independently; per the Akka journal contract, one result slot is
     * produced per AtomicWrite ({@code Optional.empty()} = success,
     * {@code Optional.of(e)} = rejection).
     *
     * @param messages the atomic writes to persist
     * @return a completed future with one result per AtomicWrite
     */
    @Override
    public Future<Iterable<Optional<Exception>>> doAsyncWriteMessages(Iterable<AtomicWrite> messages) {
        try {
            java.util.List<Optional<Exception>> results = new java.util.ArrayList<>();

            for (AtomicWrite atomicWrite : messages) {
                try {
                    writeAtomicWriteToDatabase(atomicWrite);
                    results.add(Optional.empty()); // success
                } catch (Exception e) {
                    logger.error("Failed to write atomic write: {}", atomicWrite, e);
                    results.add(Optional.of(e)); // rejection
                }
            }

            return Futures.successful((Iterable<Optional<Exception>>) results);
        } catch (Exception e) {
            logger.error("Failed to write messages", e);
            return Futures.failed(e);
        }
    }

    /**
     * Deletes all messages for {@code persistenceId} with sequence number
     * {@code <= toSequenceNr} and prunes the in-memory index.
     *
     * @param persistenceId the persistent actor id
     * @param toSequenceNr  inclusive upper bound; Akka passes
     *                      {@code Long.MAX_VALUE} to mean "delete everything"
     * @return a successful future on completion, failed future on SQL error
     */
    @Override
    public Future<Void> doAsyncDeleteMessagesTo(String persistenceId, long toSequenceNr) {
        try {
            String deleteSQL = "DELETE FROM " + JOURNAL_TABLE +
                             " WHERE persistence_id = ? AND sequence_nr <= ?";

            try (Connection conn = dataSource.getConnection();
                 PreparedStatement stmt = conn.prepareStatement(deleteSQL)) {

                stmt.setString(1, persistenceId);
                stmt.setLong(2, toSequenceNr);

                int deletedCount = stmt.executeUpdate();

                // Prune the in-memory index by iterating existing entries.
                // BUGFIX: the previous implementation counted from 1 to
                // toSequenceNr; since Akka uses Long.MAX_VALUE to mean
                // "delete all", that loop would effectively never terminate.
                sequenceIndex.entrySet().removeIf(entry ->
                        entry.getValue() <= toSequenceNr
                                && entry.getKey().equals(createJournalKey(persistenceId, entry.getValue())));

                logger.debug("Deleted {} messages for persistenceId: {} up to sequence: {}",
                           deletedCount, persistenceId, toSequenceNr);
            }

            return Futures.successful(null);
        } catch (Exception e) {
            logger.error("Failed to delete messages for persistenceId: {}", persistenceId, e);
            return Futures.failed(e);
        }
    }

    /**
     * Replays persisted messages for {@code persistenceId} in ascending
     * sequence-number order, invoking {@code recoveryCallback} for each,
     * bounded by [fromSequenceNr, toSequenceNr] and at most {@code max} rows.
     *
     * @param persistenceId    the persistent actor id
     * @param fromSequenceNr   inclusive lower bound
     * @param toSequenceNr     inclusive upper bound
     * @param max              maximum number of messages to replay
     * @param recoveryCallback invoked once per recovered message
     * @return a successful future on completion, failed future on error
     */
    @Override
    public Future<Void> doAsyncReplayMessages(String persistenceId, long fromSequenceNr,
                                            long toSequenceNr, long max,
                                            Consumer<PersistentRepr> recoveryCallback) {
        try {
            String selectSQL = "SELECT persistence_id, sequence_nr, manifest, writer_uuid, payload " +
                             "FROM " + JOURNAL_TABLE +
                             " WHERE persistence_id = ? AND sequence_nr >= ? AND sequence_nr <= ? " +
                             "ORDER BY sequence_nr LIMIT ?";

            try (Connection conn = dataSource.getConnection();
                 PreparedStatement stmt = conn.prepareStatement(selectSQL)) {

                stmt.setString(1, persistenceId);
                stmt.setLong(2, fromSequenceNr);
                stmt.setLong(3, toSequenceNr);
                stmt.setLong(4, max);

                try (ResultSet rs = stmt.executeQuery()) {
                    // count guard is redundant with the SQL LIMIT but kept as
                    // a cheap safety net against driver LIMIT quirks.
                    long count = 0;
                    while (rs.next() && count < max) {
                        PersistentRepr repr = readPersistentReprFromResultSet(rs);
                        if (repr != null) {
                            recoveryCallback.accept(repr);
                            count++;
                        }
                    }

                    logger.debug("Replayed {} messages for persistenceId: {} from {} to {}",
                               count, persistenceId, fromSequenceNr, toSequenceNr);
                }
            }

            return Futures.successful(null);
        } catch (Exception e) {
            logger.error("Failed to replay messages for persistenceId: {}", persistenceId, e);
            return Futures.failed(e);
        }
    }

    /**
     * Reads the highest stored sequence number for {@code persistenceId}
     * at or above {@code fromSequenceNr}.
     *
     * @param persistenceId  the persistent actor id
     * @param fromSequenceNr lower bound hint from Akka
     * @return future of the highest sequence number, or
     *         {@code fromSequenceNr - 1} when no rows match
     */
    @Override
    public Future<Long> doAsyncReadHighestSequenceNr(String persistenceId, long fromSequenceNr) {
        try {
            String selectSQL = "SELECT MAX(sequence_nr) FROM " + JOURNAL_TABLE +
                             " WHERE persistence_id = ? AND sequence_nr >= ?";

            try (Connection conn = dataSource.getConnection();
                 PreparedStatement stmt = conn.prepareStatement(selectSQL)) {

                stmt.setString(1, persistenceId);
                stmt.setLong(2, fromSequenceNr);

                try (ResultSet rs = stmt.executeQuery()) {
                    long highestSeq = fromSequenceNr - 1;
                    if (rs.next()) {
                        long maxSeq = rs.getLong(1);
                        // MAX() over zero rows yields SQL NULL; wasNull()
                        // distinguishes that from a legitimate 0.
                        if (!rs.wasNull()) {
                            highestSeq = maxSeq;
                        }
                    }

                    logger.debug("Highest sequence number for persistenceId: {} is {}",
                               persistenceId, highestSeq);
                    return Futures.successful(highestSeq);
                }
            }
        } catch (Exception e) {
            logger.error("Failed to read highest sequence number for persistenceId: {}",
                       persistenceId, e);
            // BUGFIX: previously this returned fromSequenceNr - 1 as a
            // SUCCESS on database errors, which could make Akka reuse
            // sequence numbers and silently corrupt the journal. Propagate
            // the failure instead so recovery is retried/aborted.
            return Futures.failed(e);
        }
    }

    /**
     * Persists one {@link AtomicWrite} atomically: all of its events are
     * committed in a single transaction, or none are (rollback on failure),
     * as required by the Akka journal contract. The in-memory index is only
     * updated after a successful commit.
     *
     * @param atomicWrite the events to persist as a unit
     * @throws Exception if the transaction fails (after rollback)
     */
    private void writeAtomicWriteToDatabase(AtomicWrite atomicWrite) throws Exception {
        // NOTE(review): ON CONFLICT DO NOTHING silently drops duplicate
        // (persistence_id, sequence_nr) rows — presumably intentional
        // idempotence for redelivery; confirm it should not be surfaced as a
        // rejection instead.
        String insertSQL = "INSERT INTO " + JOURNAL_TABLE +
                         " (persistence_id, sequence_nr, manifest, writer_uuid, payload) " +
                         "VALUES (?, ?, ?, ?, ?) ON CONFLICT (persistence_id, sequence_nr) DO NOTHING";

        try (Connection conn = dataSource.getConnection()) {
            boolean previousAutoCommit = conn.getAutoCommit();
            // BUGFIX: the batch previously ran in auto-commit mode, so a
            // mid-batch failure could leave a partial AtomicWrite persisted.
            conn.setAutoCommit(false);

            java.util.List<PersistentRepr> written = new java.util.ArrayList<>();
            try (PreparedStatement stmt = conn.prepareStatement(insertSQL)) {

                scala.collection.Iterator<PersistentRepr> iterator = atomicWrite.payload().iterator();

                while (iterator.hasNext()) {
                    PersistentRepr repr = iterator.next();

                    stmt.setString(1, repr.persistenceId());
                    stmt.setLong(2, repr.sequenceNr());
                    stmt.setString(3, repr.manifest());
                    stmt.setString(4, repr.writerUuid());

                    // Serialize the event payload to bytes.
                    byte[] payloadBytes = serializePayload(repr.payload());
                    stmt.setBytes(5, payloadBytes);

                    stmt.addBatch();
                    written.add(repr);
                }

                stmt.executeBatch();
                conn.commit();
            } catch (Exception e) {
                conn.rollback();
                throw e;
            } finally {
                conn.setAutoCommit(previousAutoCommit);
            }

            // Update the index only after the transaction committed, so a
            // failed write leaves no stale index entries.
            for (PersistentRepr repr : written) {
                sequenceIndex.put(createJournalKey(repr.persistenceId(), repr.sequenceNr()),
                        repr.sequenceNr());
            }
        }
    }

    /**
     * Rebuilds a {@link PersistentRepr} from the current row of {@code rs}.
     *
     * @param rs positioned result set containing the journal columns
     * @return the reconstructed representation
     * @throws Exception on SQL or deserialization failure
     */
    private PersistentRepr readPersistentReprFromResultSet(ResultSet rs) throws Exception {
        String persistenceId = rs.getString("persistence_id");
        long sequenceNr = rs.getLong("sequence_nr");
        String manifest = rs.getString("manifest");
        String writerUuid = rs.getString("writer_uuid");
        byte[] payloadBytes = rs.getBytes("payload");

        // Deserialize the stored payload bytes back into the event object.
        Object payload = deserializePayload(payloadBytes);

        // Rebuild the PersistentRepr with the stored metadata.
        return PersistentRepr.apply(
            payload,
            sequenceNr,
            persistenceId,
            manifest,
            false, // deleted
            null, // sender
            writerUuid
        );
    }

    /**
     * Serializes an event payload with Java native serialization.
     *
     * <p>NOTE(review): when the payload is not {@link Serializable}, only
     * {@code payload.toString()} is stored — the original event is
     * unrecoverable on replay (a String comes back instead). Confirm all
     * persisted events are Serializable, or fail fast here.
     *
     * @param payload the event object
     * @return serialized bytes
     * @throws Exception on serialization failure
     */
    private byte[] serializePayload(Object payload) throws Exception {
        try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
             ObjectOutputStream oos = new ObjectOutputStream(baos)) {

            if (payload instanceof Serializable) {
                oos.writeObject(payload);
            } else {
                oos.writeObject(payload.toString());
            }

            oos.flush();
            return baos.toByteArray();
        }
    }

    /**
     * Deserializes a stored payload with Java native deserialization.
     *
     * <p>NOTE(review): native deserialization of a database column is a
     * gadget-chain risk if the database is ever writable by untrusted
     * parties; consider an {@code ObjectInputFilter} or a JSON codec.
     *
     * @param payloadBytes bytes previously produced by {@link #serializePayload}
     * @return the deserialized object
     * @throws Exception on deserialization failure
     */
    private Object deserializePayload(byte[] payloadBytes) throws Exception {
        try (ByteArrayInputStream bais = new ByteArrayInputStream(payloadBytes);
             ObjectInputStream ois = new ObjectInputStream(bais)) {

            return ois.readObject();
        }
    }

    /**
     * Builds the index key for a (persistenceId, sequenceNr) pair. The
     * sequence number is zero-padded to 20 digits so keys sort lexically
     * in sequence order.
     */
    private String createJournalKey(String persistenceId, long sequenceNr) {
        return persistenceId + "-" + String.format("%020d", sequenceNr);
    }

    /**
     * Preloads the in-memory sequence index from the journal table.
     * Failures are logged and swallowed deliberately: the index is a
     * best-effort cache and must not prevent actor startup.
     *
     * <p>NOTE(review): this scans every row in the journal table at startup;
     * for large journals this is O(total events) in time and memory.
     */
    private void loadSequenceIndex() {
        try {
            String selectSQL = "SELECT persistence_id, sequence_nr FROM " + JOURNAL_TABLE;

            try (Connection conn = dataSource.getConnection();
                 Statement stmt = conn.createStatement();
                 ResultSet rs = stmt.executeQuery(selectSQL)) {

                while (rs.next()) {
                    String persistenceId = rs.getString("persistence_id");
                    long sequenceNr = rs.getLong("sequence_nr");
                    String key = createJournalKey(persistenceId, sequenceNr);
                    sequenceIndex.put(key, sequenceNr);
                }

                logger.info("Loaded sequence index with {} entries", sequenceIndex.size());
            }
        } catch (Exception e) {
            logger.error("Failed to load sequence index", e);
        }
    }

    /**
     * Actor lifecycle hook: releases the connection pool and clears the
     * in-memory index.
     *
     * @throws Exception propagated from {@code super.postStop()}
     */
    @Override
    public void postStop() throws Exception {
        // Close the connection pool.
        if (dataSource != null) {
            dataSource.close();
        }

        // Release the in-memory index.
        sequenceIndex.clear();

        logger.info("PostgreSQL Journal Plugin stopped");
        super.postStop();
    }
}
