package com.by.log.handler;

import com.by.log.config.LogProperties;
import com.by.log.constants.LogLevel;
import com.by.log.entity.LogEntity;
import com.by.log.entity.LogEntry;
import com.by.log.entity.StatisticData;
import com.by.log.mapper.LogMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.time.LocalTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.*;

/**
 * Default {@link LogStorage} implementation.
 *
 * <p>Routes WARN-and-above entries to the database and everything else to a
 * log file (falling back to the console when the file cannot be opened).
 * In async mode, entries are buffered in queues and flushed in batches by
 * dedicated daemon threads; actual I/O is delegated to small thread pools.
 * Seckill (flash-sale) related entries are sampled to reduce log volume.
 */
@Component
public class DefaultLogStorage implements LogStorage {
    private final String logFilePath;
    private final boolean asyncMode;
    private PrintWriter fileWriter;

    @Autowired
    private LogMapper logMapper;

    @Autowired(required = false)
    private LogProperties logProperties;

    // Thread pool performing file writes
    private final ExecutorService fileExecutorService;

    // Thread pool performing database inserts
    private final ExecutorService dbExecutorService;

    // Buffer queues for log entries (async mode)
    private final BlockingQueue<LogEntry> fileLogQueue;
    private final BlockingQueue<LogEntry> dbLogQueue;

    // Buffer queue for statistic data
    private final BlockingQueue<StatisticData> statisticQueue;

    // Batch-draining daemon threads
    private final Thread fileBatchThread;
    private final Thread dbBatchThread;
    private final Thread statisticBatchThread;

    // Controls the batch-draining loops; set to false by shutdown()
    private volatile boolean running = true;

    // Random generator used for sampling decisions
    private final Random random = new Random();

    // Defaults used when no LogProperties bean is available
    private static final int DEFAULT_BATCH_SIZE = 100;
    private static final long DEFAULT_BATCH_INTERVAL = 5000;
    private static final String DEFAULT_DB_LOG_LEVEL = "WARN";
    private static final String DEFAULT_FILE_LOG_LEVEL = "INFO";
    private static final double DEFAULT_SECKILL_SAMPLE_RATE = 0.1;

    public DefaultLogStorage() {
        this(null, true);
    }

    /**
     * @param logFilePath path of the log file; defaults to ./logs/miaosha.log when null
     * @param asyncMode   when true, entries are queued and flushed in batches;
     *                    when false, each entry is handled immediately
     */
    public DefaultLogStorage(String logFilePath, boolean asyncMode) {
        this.logFilePath = logFilePath != null ? logFilePath : "./logs/miaosha.log";
        this.asyncMode = asyncMode;

        // Initialize thread pools
        this.fileExecutorService = Executors.newFixedThreadPool(2);
        this.dbExecutorService = Executors.newFixedThreadPool(2);

        // Initialize buffer queues
        this.fileLogQueue = new LinkedBlockingQueue<>();
        this.dbLogQueue = new LinkedBlockingQueue<>();
        this.statisticQueue = new LinkedBlockingQueue<>();

        // Create the log directory if it does not exist yet
        createLogDirectory();

        // Open the file writer (append mode)
        initFileWriter();

        // Start the batch-draining daemon threads
        this.fileBatchThread = new Thread(this::processFileLogsBatch);
        this.fileBatchThread.setDaemon(true);
        this.fileBatchThread.start();

        this.dbBatchThread = new Thread(this::processDbLogsBatch);
        this.dbBatchThread.setDaemon(true);
        this.dbBatchThread.start();

        this.statisticBatchThread = new Thread(this::processStatisticBatch);
        this.statisticBatchThread.setDaemon(true);
        this.statisticBatchThread.start();
    }

    /** Creates the parent directory of the log file if it is missing. */
    private void createLogDirectory() {
        try {
            File logFile = new File(this.logFilePath);
            File logDir = logFile.getParentFile();
            if (logDir != null && !logDir.exists()) {
                logDir.mkdirs();
            }
        } catch (Exception e) {
            System.err.println("Failed to create log directory: " + e.getMessage());
        }
    }

    /** Opens the log file in append mode; on failure, output falls back to the console. */
    private void initFileWriter() {
        try {
            fileWriter = new PrintWriter(new FileWriter(logFilePath, true));
        } catch (IOException e) {
            System.err.println("Failed to create file writer for log file: " + logFilePath);
        }
    }

    @Override
    public void storeLog(LogEntry entry) {
        // Apply sampling first so high-volume seckill traffic is thinned out
        if (shouldSampleLog(entry)) {
            if (asyncMode) {
                // Async mode: enqueue for batched processing
                if (entry.getLevel() != null && entry.getLevel().isGreaterOrEqual(LogLevel.WARN)) {
                    // WARN and above go to the database
                    dbLogQueue.offer(entry);
                } else {
                    // Everything else goes to the log file
                    fileLogQueue.offer(entry);
                }
            } else {
                // Sync mode: hand off to the appropriate executor right away
                if (entry.getLevel() != null && entry.getLevel().isGreaterOrEqual(LogLevel.WARN)) {
                    // WARN and above go to the database
                    processDbLog(entry);
                } else {
                    // Everything else goes to the log file
                    processFileLog(entry);
                }
            }
        }
    }

    /**
     * Decides whether the given entry should be recorded.
     *
     * <p>ERROR-and-above entries and non-seckill entries are always recorded;
     * seckill-related entries are recorded with a configurable sample rate,
     * halved during configured peak hours when dynamic sampling is enabled.
     *
     * @param entry the log entry
     * @return true if the entry should be recorded
     */
    private boolean shouldSampleLog(LogEntry entry) {
        // ERROR-and-above entries are always recorded
        if (entry.getLevel() != null && entry.getLevel().isGreaterOrEqual(LogLevel.ERROR)) {
            return true;
        }

        // Detect seckill-related entries by message content or service name.
        // The service-name check is deliberately independent of the message,
        // so seckill-service entries with a null message are still sampled.
        String message = entry.getMessage();
        boolean isSeckillLog =
                (message != null && (message.contains("秒杀") || message.contains("seckill")))
                || (entry.getServiceName() != null && entry.getServiceName().contains("seckill"));

        // Non-seckill entries are always recorded
        if (!isSeckillLog) {
            return true;
        }

        // Base sample rate, from configuration or the default
        double sampleRate = getSeckillSampleRate();

        // Dynamically lower the sample rate during peak hours
        if (logProperties != null && logProperties.isEnableDynamicSampling()) {
            LocalTime now = LocalTime.now();
            int hour = now.getHour();
            if (hour >= logProperties.getPeakStartHour() && hour < logProperties.getPeakEndHour()) {
                sampleRate *= 0.5; // halve the rate during peak hours
            }
        }

        // Record with probability sampleRate
        return random.nextDouble() < sampleRate;
    }

    /**
     * @return the sample rate for seckill-related entries
     */
    private double getSeckillSampleRate() {
        return logProperties != null ? logProperties.getSeckillSampleRate() : DEFAULT_SECKILL_SAMPLE_RATE;
    }

    @Override
    public void storeStatistic(StatisticData data) {
        // Enqueue for batched processing unless we are shutting down
        if (running) {
            statisticQueue.offer(data);
        }
    }

    /** Writes a single entry to the log file on the file executor (sync mode path). */
    private void processFileLog(LogEntry entry) {
        fileExecutorService.submit(() -> {
            try {
                LogLevel fileLogLevel = LogLevel.valueOf(getFileLogLevel());
                LogLevel level = entry.getLevel() != null ? entry.getLevel() : LogLevel.INFO;
                if (level.isGreaterOrEqual(fileLogLevel)) {
                    writeToFile(formatLogEntry(entry));
                }
            } catch (Exception e) {
                System.err.println("Error writing log to file: " + e.getMessage());
            }
        });
    }

    /** Stores a single entry to the database on the db executor (sync mode path). */
    private void processDbLog(LogEntry entry) {
        dbExecutorService.submit(() -> {
            try {
                LogLevel dbLogLevel = LogLevel.valueOf(getDbLogLevel());
                LogLevel level = entry.getLevel() != null ? entry.getLevel() : LogLevel.INFO;
                if (level.isGreaterOrEqual(dbLogLevel)) {
                    storeToDatabase(entry);
                }
            } catch (Exception e) {
                System.err.println("Error storing log to database: " + e.getMessage());
            }
        });
    }

    /** Batch-draining loop for the file log queue; runs until shutdown. */
    private void processFileLogsBatch() {
        List<LogEntry> batch = new ArrayList<>();
        while (running) {
            try {
                // Block (up to the batch interval) for the first element
                LogEntry entry = fileLogQueue.poll(getBatchInterval(), TimeUnit.MILLISECONDS);
                if (entry != null) {
                    batch.add(entry);
                }

                // Drain additional elements to fill the batch
                fileLogQueue.drainTo(batch, getBatchSize() - batch.size());

                // Dispatch the batch
                if (!batch.isEmpty()) {
                    processFileBatch(batch);
                    batch.clear();
                }
            } catch (InterruptedException e) {
                // Interrupted: restore the flag and exit the loop
                Thread.currentThread().interrupt();
                break;
            } catch (Exception e) {
                System.err.println("Error processing file log batch: " + e.getMessage());
                e.printStackTrace();
            }
        }
    }

    /** Batch-draining loop for the database log queue; runs until shutdown. */
    private void processDbLogsBatch() {
        List<LogEntry> batch = new ArrayList<>();
        while (running) {
            try {
                // Block (up to the batch interval) for the first element
                LogEntry entry = dbLogQueue.poll(getBatchInterval(), TimeUnit.MILLISECONDS);
                if (entry != null) {
                    batch.add(entry);
                }

                // Drain additional elements to fill the batch
                dbLogQueue.drainTo(batch, getBatchSize() - batch.size());

                // Dispatch the batch
                if (!batch.isEmpty()) {
                    processDbBatch(batch);
                    batch.clear();
                }
            } catch (InterruptedException e) {
                // Interrupted: restore the flag and exit the loop
                Thread.currentThread().interrupt();
                break;
            } catch (Exception e) {
                System.err.println("Error processing database log batch: " + e.getMessage());
                e.printStackTrace();
            }
        }
    }

    /** Batch-draining loop for the statistic queue; runs until shutdown. */
    private void processStatisticBatch() {
        List<StatisticData> batch = new ArrayList<>();
        while (running) {
            try {
                // Block (up to the batch interval) for the first element
                StatisticData data = statisticQueue.poll(getBatchInterval(), TimeUnit.MILLISECONDS);
                if (data != null) {
                    batch.add(data);
                }

                // Drain additional elements to fill the batch
                statisticQueue.drainTo(batch, getBatchSize() - batch.size());

                // Process the batch
                if (!batch.isEmpty()) {
                    processStatisticDataBatch(batch);
                    batch.clear();
                }
            } catch (InterruptedException e) {
                // Interrupted: restore the flag and exit the loop
                Thread.currentThread().interrupt();
                break;
            } catch (Exception e) {
                System.err.println("Error processing statistic data batch: " + e.getMessage());
                e.printStackTrace();
            }
        }
    }

    /** Writes a batch of entries to the log file on the file executor. */
    private void processFileBatch(List<LogEntry> batch) {
        // Snapshot the batch before submitting: the caller clears and reuses
        // the original list, which would otherwise race with the executor task.
        List<LogEntry> snapshot = new ArrayList<>(batch);
        fileExecutorService.submit(() -> {
            for (LogEntry entry : snapshot) {
                try {
                    LogLevel fileLogLevel = LogLevel.valueOf(getFileLogLevel());
                    LogLevel level = entry.getLevel() != null ? entry.getLevel() : LogLevel.INFO;
                    if (level.isGreaterOrEqual(fileLogLevel)) {
                        writeToFile(formatLogEntry(entry));
                    }
                } catch (Exception e) {
                    System.err.println("Error writing log to file: " + e.getMessage());
                }
            }
        });
    }

    /** Inserts a batch of entries into the database on the db executor. */
    private void processDbBatch(List<LogEntry> batch) {
        // Snapshot the batch before submitting: the caller clears and reuses
        // the original list, which would otherwise race with the executor task.
        List<LogEntry> snapshot = new ArrayList<>(batch);
        dbExecutorService.submit(() -> {
            try {
                List<LogEntity> logEntityList = new ArrayList<>();
                LogLevel dbLogLevel = LogLevel.valueOf(getDbLogLevel());

                for (LogEntry entry : snapshot) {
                    LogLevel level = entry.getLevel() != null ? entry.getLevel() : LogLevel.INFO;
                    if (level.isGreaterOrEqual(dbLogLevel)) {
                        logEntityList.add(LogEntity.fromLogEntry(entry));
                    }
                }

                if (!logEntityList.isEmpty() && logMapper != null) {
                    logMapper.insertBatchSomeColumn(logEntityList);
                }
            } catch (Exception e) {
                System.err.println("Error storing logs to database: " + e.getMessage());
                e.printStackTrace();
                // Fall back to inserting one row at a time
                storeToDatabaseFallback(snapshot);
            }
        });
    }

    /** Processes a batch of statistic data; simplified to console output for now. */
    private void processStatisticDataBatch(List<StatisticData> batch) {
        for (StatisticData data : batch) {
            System.out.println("Statistic Data: " + data);
        }
    }

    /** Fallback path: inserts entries one by one after a batch insert failed. */
    private void storeToDatabaseFallback(List<LogEntry> batch) {
        LogLevel dbLogLevel = LogLevel.valueOf(getDbLogLevel());
        for (LogEntry entry : batch) {
            try {
                LogLevel level = entry.getLevel() != null ? entry.getLevel() : LogLevel.INFO;
                if (level.isGreaterOrEqual(dbLogLevel)) {
                    storeToDatabase(entry);
                }
            } catch (Exception e) {
                System.err.println("Error storing log to database (fallback): " + e.getMessage());
            }
        }
    }

    /** Inserts a single entry into the database via the mapper, if available. */
    private void storeToDatabase(LogEntry entry) {
        try {
            if (logMapper != null) {
                LogEntity logEntity = LogEntity.fromLogEntry(entry);
                logMapper.insert(logEntity);
            }
        } catch (Exception e) {
            System.err.println("Error storing log to database: " + e.getMessage());
        }
    }

    /**
     * Formats a log entry as a single text line:
     * timestamp, level, thread, class, optional trace fields, message and extras.
     */
    private String formatLogEntry(LogEntry entry) {
        StringBuilder sb = new StringBuilder();
        sb.append("[")
                .append(entry.getTimestamp().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS")))
                .append("] [")
                .append(entry.getLevel())
                .append("] [")
                .append(entry.getThreadName())
                .append("] [")
                .append(entry.getClassName())
                .append("] ");

        // Tracing context, appended only when present
        if (entry.getTraceId() != null) {
            sb.append("[TraceId: ").append(entry.getTraceId()).append("] ");
        }

        if (entry.getSpanId() != null) {
            sb.append("[SpanId: ").append(entry.getSpanId()).append("] ");
        }

        if (entry.getParentSpanId() != null) {
            sb.append("[ParentSpanId: ").append(entry.getParentSpanId()).append("] ");
        }

        if (entry.getServiceName() != null) {
            sb.append("[Service: ").append(entry.getServiceName()).append("] ");
        }

        if (entry.getMethodName() != null) {
            sb.append("[Method: ").append(entry.getMethodName()).append("] ");
        }

        sb.append(entry.getMessage() != null ? entry.getMessage() : "");

        if (entry.getExecutionTime() != null) {
            sb.append(" [ExecTime: ").append(entry.getExecutionTime()).append("ms]");
        }

        if (entry.getSuccess() != null) {
            sb.append(" [Success: ").append(entry.getSuccess()).append("]");
        }

        return sb.toString();
    }

    /**
     * Writes one formatted line to the log file (console fallback when the
     * writer is unavailable). Synchronized because the file executor uses two
     * threads and concurrent println/flush calls could interleave lines.
     */
    private synchronized void writeToFile(String message) {
        if (fileWriter != null) {
            fileWriter.println(message);
            fileWriter.flush();
        } else {
            // Writer unavailable: fall back to the console
            System.out.println(message);
        }
    }

    @Override
    public void shutdown() {
        // Stop accepting new work
        running = false;

        try {
            // Give the batch threads a moment to finish their current pass
            fileBatchThread.join(1000); // wait at most 1 second
            dbBatchThread.join(1000);   // wait at most 1 second
            statisticBatchThread.join(1000); // wait at most 1 second
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }

        // Flush whatever is still sitting in the queues before closing down
        processRemainingLogs();

        fileExecutorService.shutdown();
        dbExecutorService.shutdown();

        try {
            // Wait for in-flight executor tasks to finish
            if (!fileExecutorService.awaitTermination(5, TimeUnit.SECONDS)) {
                fileExecutorService.shutdownNow();
                // One more grace period after forcing shutdown
                if (!fileExecutorService.awaitTermination(5, TimeUnit.SECONDS)) {
                    System.err.println("File executor service did not terminate");
                }
            }
            if (!dbExecutorService.awaitTermination(5, TimeUnit.SECONDS)) {
                dbExecutorService.shutdownNow();
                // One more grace period after forcing shutdown
                if (!dbExecutorService.awaitTermination(5, TimeUnit.SECONDS)) {
                    System.err.println("Database executor service did not terminate");
                }
            }
        } catch (InterruptedException e) {
            fileExecutorService.shutdownNow();
            dbExecutorService.shutdownNow();
            Thread.currentThread().interrupt();
        }

        // Close the file writer
        if (fileWriter != null) {
            fileWriter.close();
        }
    }

    /**
     * Drains and processes anything left in the queues; called during shutdown.
     */
    private void processRemainingLogs() {
        // Remaining file log entries
        LogEntry fileEntry;
        while ((fileEntry = fileLogQueue.poll()) != null) {
            try {
                LogLevel fileLogLevel = LogLevel.valueOf(getFileLogLevel());
                LogLevel level = fileEntry.getLevel() != null ? fileEntry.getLevel() : LogLevel.INFO;
                if (level.isGreaterOrEqual(fileLogLevel)) {
                    writeToFile(formatLogEntry(fileEntry));
                }
            } catch (Exception e) {
                System.err.println("Error processing remaining file log: " + e.getMessage());
            }
        }

        // Remaining database log entries
        LogEntry dbEntry;
        while ((dbEntry = dbLogQueue.poll()) != null) {
            try {
                LogLevel dbLogLevel = LogLevel.valueOf(getDbLogLevel());
                LogLevel level = dbEntry.getLevel() != null ? dbEntry.getLevel() : LogLevel.INFO;
                if (level.isGreaterOrEqual(dbLogLevel)) {
                    storeToDatabase(dbEntry);
                }
            } catch (Exception e) {
                System.err.println("Error processing remaining database log: " + e.getMessage());
            }
        }

        // Remaining statistic data
        StatisticData statData;
        while ((statData = statisticQueue.poll()) != null) {
            try {
                System.out.println("Remaining Statistic Data: " + statData);
            } catch (Exception e) {
                System.err.println("Error processing remaining statistic data: " + e.getMessage());
            }
        }
    }

    // Configuration getters with built-in defaults
    private int getBatchSize() {
        return logProperties != null ? logProperties.getBatchSize() : DEFAULT_BATCH_SIZE;
    }

    private long getBatchInterval() {
        return logProperties != null ? logProperties.getBatchInterval() : DEFAULT_BATCH_INTERVAL;
    }

    private String getDbLogLevel() {
        return logProperties != null ? logProperties.getDbLogLevel() : DEFAULT_DB_LOG_LEVEL;
    }

    private String getFileLogLevel() {
        return logProperties != null ? logProperties.getFileLogLevel() : DEFAULT_FILE_LOG_LEVEL;
    }
}