package com.datagateway.component;

import com.datagateway.config.DataGatewayProperties;
import com.datagateway.model.ProcessedData;
import com.datagateway.component.PartitionManager;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Hive writer component.
 *
 * Batches processed records and inserts them into a partitioned Hive table in a
 * single transaction per batch, mitigating the HDFS small-file problem.
 *
 * @author Data Gateway Team
 * @version 1.0.0
 */
@Component
public class HiveWriter {

    private static final Logger logger = LoggerFactory.getLogger(HiveWriter.class);

    @Autowired
    private DataSource dataSource;

    @Autowired
    private DataGatewayProperties properties;

    @Autowired
    private ObjectMapper objectMapper;

    @Autowired
    private PartitionManager partitionManager;

    /**
     * Current batch size, adjustable at runtime via {@link #updateBatchSize(int)}.
     * Not read anywhere inside this class — presumably consumed by an external
     * batching loop (TODO: confirm). Volatile so concurrent readers observe updates.
     */
    private volatile int batchSize;

    /**
     * Current batch timeout in milliseconds, adjustable at runtime via
     * {@link #updateBatchTimeout(int)}. Same caveat as {@link #batchSize}.
     */
    private volatile int batchTimeout;

    /** Number of records successfully written to Hive. */
    private final AtomicLong writeSuccessCount = new AtomicLong(0);

    /** Number of records that failed to be written. */
    private final AtomicLong writeFailureCount = new AtomicLong(0);

    /** Total bytes of data successfully written. */
    private final AtomicLong totalWriteBytes = new AtomicLong(0);

    /**
     * Updates the batch size used for Hive writes.
     *
     * @param batchSize the new batch size
     */
    public void updateBatchSize(int batchSize) {
        this.batchSize = batchSize;
        logger.info("Hive写入批处理大小已更新: {}", batchSize);
    }

    /**
     * Updates the batch timeout.
     *
     * @param timeout the new timeout in milliseconds
     */
    public void updateBatchTimeout(int timeout) {
        this.batchTimeout = timeout;
        logger.info("Hive写入批处理超时时间已更新: {} ms", timeout);
    }

    /**
     * Writes a batch of processed records to Hive inside one transaction.
     * Only records whose status is {@code SUCCESS} are inserted; other records
     * are skipped and excluded from both success and failure statistics.
     *
     * @param batchData list of processed records; null or empty is a no-op
     * @throws RuntimeException if the batch insert fails (the transaction is
     *                          rolled back before rethrowing)
     */
    public void writeBatch(List<ProcessedData> batchData) {
        if (batchData == null || batchData.isEmpty()) {
            logger.warn("批次数据为空，跳过写入");
            return;
        }

        long startTime = System.currentTimeMillis();
        // Records we will actually attempt to insert. Computed up front so the
        // failure counter does not include records that were never added to the
        // batch (previously the whole batch size was counted as failed).
        long eligibleCount = batchData.stream()
            .filter(data -> data.getStatus() == ProcessedData.ProcessStatus.SUCCESS)
            .count();

        Connection connection = null;
        PreparedStatement statement = null;

        try {
            connection = dataSource.getConnection();
            // Manual commit so the whole batch is atomic.
            connection.setAutoCommit(false);

            statement = connection.prepareStatement(buildInsertSql());

            // Queue every successfully-processed record into the JDBC batch.
            int batchCount = 0;
            for (ProcessedData data : batchData) {
                if (data.getStatus() == ProcessedData.ProcessStatus.SUCCESS) {
                    setStatementParameters(statement, data);
                    statement.addBatch();
                    batchCount++;
                }
            }

            if (batchCount > 0) {
                statement.executeBatch();
                connection.commit();

                // Update statistics. Byte totals only cover the records that
                // were actually written (non-SUCCESS records are excluded).
                writeSuccessCount.addAndGet(batchCount);
                long writeBytes = batchData.stream()
                    .filter(data -> data.getStatus() == ProcessedData.ProcessStatus.SUCCESS)
                    .mapToLong(data -> data.getDataSize() != null ? data.getDataSize() : 0)
                    .sum();
                totalWriteBytes.addAndGet(writeBytes);

                long processingTime = System.currentTimeMillis() - startTime;
                logger.info("批量写入Hive成功: 批次大小={}, 实际写入={}, 耗时={}ms, 数据量={}bytes",
                           batchData.size(), batchCount, processingTime, writeBytes);
            } else {
                logger.warn("批次中没有有效数据，跳过写入");
            }

        } catch (SQLException e) {
            logger.error("批量写入Hive失败: 批次大小={}", batchData.size(), e);
            // Count only the records that were eligible for this write attempt.
            writeFailureCount.addAndGet(eligibleCount);

            // Best-effort rollback; a rollback failure is logged but must not
            // mask the original write failure.
            if (connection != null) {
                try {
                    connection.rollback();
                } catch (SQLException rollbackException) {
                    logger.error("回滚事务失败", rollbackException);
                }
            }

            throw new RuntimeException("批量写入Hive失败", e);

        } finally {
            closeResources(statement, connection);
        }
    }

    /**
     * Builds the parameterized INSERT statement for the target Hive table.
     *
     * NOTE(review): the VALUES clause has 13 placeholders while 12 columns are
     * listed — the 13th is the dynamic-partition value for {@code dt}, bound
     * last by {@link #setStatementParameters}. This relies on Hive appending
     * dynamic partition values after the listed columns; verify against the
     * Hive/JDBC driver version in use.
     *
     * @return the INSERT SQL with positional parameters
     */
    private String buildInsertSql() {
        String database = properties.getHive().getDatabase();
        String table = properties.getHive().getTable();

        return String.format(
            "INSERT INTO TABLE %s.%s " +
            "PARTITION (dt) " +
            "(id, original_data, transformed_data, source_topic, partition_id, offset_id, " +
            "process_time, data_timestamp, status, error_message, processing_time_ms, data_size) " +
            "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
            database, table
        );
    }

    /**
     * Binds one record's values to the prepared statement, in the column order
     * produced by {@link #buildInsertSql()} (partition value last).
     *
     * @param statement the statement to bind into
     * @param data      the processed record to write
     * @throws SQLException if any setter fails
     */
    private void setStatementParameters(PreparedStatement statement, ProcessedData data) throws SQLException {
        int index = 1;

        // Basic identity / payload columns.
        statement.setString(index++, data.getId());
        statement.setString(index++, data.getOriginalData());

        // Transformed payload is stored as a JSON string; serialization failure
        // is tolerated and results in a NULL column rather than a failed batch.
        String transformedDataJson = null;
        if (data.getTransformedData() != null) {
            try {
                transformedDataJson = objectMapper.writeValueAsString(data.getTransformedData());
            } catch (Exception e) {
                logger.warn("序列化转换数据失败: dataId={}", data.getId(), e);
            }
        }
        statement.setString(index++, transformedDataJson);

        // Kafka provenance columns (defaulted to 0 when absent).
        statement.setString(index++, data.getSourceTopic());
        statement.setInt(index++, data.getPartition() != null ? data.getPartition() : 0);
        statement.setLong(index++, data.getOffset() != null ? data.getOffset() : 0);

        // Timestamps fall back to "now" when the record carries none.
        LocalDateTime processTime = data.getProcessTime();
        if (processTime != null) {
            statement.setTimestamp(index++, Timestamp.valueOf(processTime));
        } else {
            statement.setTimestamp(index++, Timestamp.valueOf(LocalDateTime.now()));
        }

        LocalDateTime dataTimestamp = data.getDataTimestamp();
        if (dataTimestamp != null) {
            statement.setTimestamp(index++, Timestamp.valueOf(dataTimestamp));
        } else {
            statement.setTimestamp(index++, Timestamp.valueOf(LocalDateTime.now()));
        }

        // Status / diagnostics columns.
        statement.setString(index++, data.getStatus().name());
        statement.setString(index++, data.getErrorMessage());
        statement.setLong(index++, data.getProcessingTimeMs() != null ? data.getProcessingTimeMs() : 0);
        statement.setInt(index++, data.getDataSize() != null ? data.getDataSize() : 0);

        // Dynamic partition value (dt) — must be the last bound parameter.
        PartitionManager.PartitionInfo partitionInfo = partitionManager.getPartitionInfo(data);
        statement.setString(index++, partitionInfo.getPartitionValue());

        // Update per-partition record statistics.
        partitionInfo.incrementRecordCount();
    }

    /**
     * Quietly closes JDBC resources; close failures are logged, never thrown.
     *
     * @param statement  the statement to close (may be null)
     * @param connection the connection to close (may be null)
     */
    private void closeResources(PreparedStatement statement, Connection connection) {
        if (statement != null) {
            try {
                statement.close();
            } catch (SQLException e) {
                logger.warn("关闭PreparedStatement失败", e);
            }
        }

        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException e) {
                logger.warn("关闭Connection失败", e);
            }
        }
    }

    /**
     * Creates the target Hive table if it does not exist yet.
     *
     * @throws RuntimeException if the DDL statement fails
     */
    public void createTableIfNotExists() {
        String createTableSql = buildCreateTableSql();

        try (Connection connection = dataSource.getConnection();
             PreparedStatement statement = connection.prepareStatement(createTableSql)) {

            statement.execute();
            logger.info("Hive表创建成功或已存在");

        } catch (SQLException e) {
            logger.error("创建Hive表失败", e);
            throw new RuntimeException("创建Hive表失败", e);
        }
    }

    /**
     * Builds the CREATE TABLE DDL: a Parquet table with Snappy compression,
     * partitioned by a string date column {@code dt}.
     *
     * @return the CREATE TABLE IF NOT EXISTS SQL
     */
    private String buildCreateTableSql() {
        String database = properties.getHive().getDatabase();
        String table = properties.getHive().getTable();

        return String.format(
            "CREATE TABLE IF NOT EXISTS %s.%s (" +
            "id STRING COMMENT '数据唯一标识', " +
            "original_data STRING COMMENT '原始数据', " +
            "transformed_data STRING COMMENT '转换后数据', " +
            "source_topic STRING COMMENT '数据来源主题', " +
            "partition_id INT COMMENT 'Kafka分区ID', " +
            "offset_id BIGINT COMMENT 'Kafka偏移量', " +
            "process_time TIMESTAMP COMMENT '处理时间', " +
            "data_timestamp TIMESTAMP COMMENT '数据时间戳', " +
            "status STRING COMMENT '处理状态', " +
            "error_message STRING COMMENT '错误信息', " +
            "processing_time_ms BIGINT COMMENT '处理耗时(毫秒)', " +
            "data_size INT COMMENT '数据大小(字节)'" +
            ") " +
            "COMMENT '数据网关处理结果表' " +
            "PARTITIONED BY (dt STRING COMMENT '日期分区') " +
            "STORED AS PARQUET " +
            "LOCATION '/user/hive/warehouse/%s.db/%s' " +
            "TBLPROPERTIES ('parquet.compression'='SNAPPY')",
            database, table, database, table
        );
    }

    /**
     * Returns a snapshot of the write statistics counters.
     *
     * @return an immutable statistics snapshot
     */
    public WriteStatistics getWriteStatistics() {
        return new WriteStatistics(
            writeSuccessCount.get(),
            writeFailureCount.get(),
            totalWriteBytes.get()
        );
    }

    /**
     * Immutable snapshot of write statistics.
     */
    public static class WriteStatistics {
        private final long successCount;
        private final long failureCount;
        private final long totalBytes;

        public WriteStatistics(long successCount, long failureCount, long totalBytes) {
            this.successCount = successCount;
            this.failureCount = failureCount;
            this.totalBytes = totalBytes;
        }

        public long getSuccessCount() { return successCount; }
        public long getFailureCount() { return failureCount; }
        public long getTotalBytes() { return totalBytes; }
        public long getTotalCount() { return successCount + failureCount; }

        /** Success rate as a percentage in [0, 100]; 0 when nothing was written. */
        public double getSuccessRate() {
            long total = getTotalCount();
            return total > 0 ? (double) successCount / total * 100 : 0;
        }

        @Override
        public String toString() {
            return String.format("WriteStatistics{successCount=%d, failureCount=%d, " +
                               "totalBytes=%d, successRate=%.2f%%}",
                               successCount, failureCount, totalBytes, getSuccessRate());
        }
    }
}
