package org.groupg.filetodb;

import cn.hutool.core.date.DateUtil;
import cn.hutool.core.date.TimeInterval;
import com.lmax.disruptor.*;
import com.lmax.disruptor.dsl.Disruptor;
import com.lmax.disruptor.dsl.ProducerType;
import org.apache.commons.csv.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.*;

public class DisruptorCommonsCsvProcessing {

    private static final Logger logger = LoggerFactory.getLogger(DisruptorCommonsCsvProcessing.class);

    // Shared CSV parsing configuration: the first record is treated as the
    // header row, header name matching is case-insensitive, and leading/trailing
    // whitespace is trimmed from values.
    // NOTE(review): the with* methods are deprecated in commons-csv >= 1.9 in
    // favour of CSVFormat.Builder — confirm the dependency version before migrating.
    private static final CSVFormat CSV_FORMAT = CSVFormat.DEFAULT
            .withFirstRecordAsHeader()
            .withIgnoreHeaderCase()
            .withTrim();

    /**
     * Mutable event reused in every ring-buffer slot. A single event type flows
     * through all pipeline stages; {@code batchType} tells each handler whether
     * the event is addressed to it (0 = initial trigger, 1 = CSV batch, 2 = DB batch).
     */
    public static class DataEvent {
        private List<CSVRecord> records; // batch payload (Commons CSV records)
        private int batchType;           // 0 = trigger, 1 = CSV batch, 2 = DB batch
        private int batchSize;           // number of records in this batch

        /** Resets the slot, dropping record references so they become collectable. */
        public void clear() {
            this.records = null;
            this.batchType = 0;
            this.batchSize = 0;
        }

        public List<CSVRecord> getRecords() {
            return this.records;
        }

        public void setRecords(List<CSVRecord> records) {
            this.records = records;
        }

        public int getBatchType() {
            return this.batchType;
        }

        public void setBatchType(int batchType) {
            this.batchType = batchType;
        }

        public int getBatchSize() {
            return this.batchSize;
        }

        public void setBatchSize(int batchSize) {
            this.batchSize = batchSize;
        }
    }

    // Event factory: pre-allocates one empty DataEvent per ring-buffer slot.
    public static class DataEventFactory implements EventFactory<DataEvent> {
        /** Returns a fresh, empty event; invoked by the Disruptor when the ring buffer is built. */
        @Override
        public DataEvent newInstance() {
            return new DataEvent();
        }
    }

    /**
     * First pipeline stage: reads the CSV file and republishes its records to the
     * ring buffer in 10k-record batches (batchType 1).
     *
     * <p>Fix: the handler now reacts ONLY to the initial trigger event
     * (batchType == 0). Previously it had no guard, so it re-read the whole CSV
     * file for every event flowing through the ring buffer — including the batch
     * events it publishes itself, since chained handlers observe all events.
     *
     * <p>NOTE(review): publishing back into the same ring buffer from inside a
     * handler can deadlock if the buffer fills while this handler blocks in
     * {@code next()} — consider a separate producer thread or output buffer.
     */
    public static class CsvFileReader implements EventHandler<DataEvent> {
        private static final int READ_BATCH_SIZE = 10_000;

        private final RingBuffer<DataEvent> ringBuffer;
        private final Path csvFilePath;
        private final List<CSVRecord> buffer = new ArrayList<>(READ_BATCH_SIZE);

        public CsvFileReader(RingBuffer<DataEvent> ringBuffer, Path csvFilePath) {
            this.ringBuffer = ringBuffer;
            this.csvFilePath = csvFilePath;
        }

        @Override
        public void onEvent(DataEvent event, long sequence, boolean endOfBatch) {
            // Only the trigger event starts a read; ignore downstream batch events.
            if (event.getBatchType() != 0) {
                return;
            }
            logger.info("Processing data event");
            try (Reader reader = Files.newBufferedReader(csvFilePath);
                 CSVParser parser = new CSVParser(reader, CSV_FORMAT)) {

                for (CSVRecord record : parser) {
                    buffer.add(record);
                    // Publish a full batch every READ_BATCH_SIZE records.
                    if (buffer.size() >= READ_BATCH_SIZE) {
                        publishBatch(buffer);
                        buffer.clear();
                    }
                }

                // Flush the trailing partial batch, if any.
                if (!buffer.isEmpty()) {
                    publishBatch(buffer);
                    buffer.clear();
                }

            } catch (IOException e) {
                logger.error("CSV文件读取错误", e);
            }
        }

        // Claims a slot, stores a defensive copy (the caller reuses its list),
        // tags it as a CSV batch, and publishes — publish is in finally so the
        // claimed sequence is never left unpublished.
        private void publishBatch(List<CSVRecord> batch) {
            long sequence = ringBuffer.next();
            try {
                DataEvent batchEvent = ringBuffer.get(sequence);
                batchEvent.setRecords(new ArrayList<>(batch));
                batchEvent.setBatchType(1);
                batchEvent.setBatchSize(batch.size());
                logger.info("发布CSV批次: {} 条记录", batch.size());
            } finally {
                ringBuffer.publish(sequence);
            }
        }
    }

    /**
     * Middle pipeline stage: takes each 10k CSV batch (batchType 1), slices it
     * into 1000-record DB batches, and republishes them as batchType 2 events.
     */
    public static class DbBatchProcessor implements EventHandler<DataEvent> {
        private final RingBuffer<DataEvent> ringBuffer;

        public DbBatchProcessor(RingBuffer<DataEvent> ringBuffer) {
            this.ringBuffer = ringBuffer;
        }

        @Override
        public void onEvent(DataEvent event, long sequence, boolean endOfBatch) {
            // Only CSV batches are split here; other event types pass through.
            if (event.getBatchType() != 1) {
                return;
            }

            List<CSVRecord> all = event.getRecords();
            logger.info("处理CSV批次: {} 条记录", all.size());

            // Walk the list in 1000-record windows; each window is copied out of
            // the subList view before being published.
            int total = all.size();
            int from = 0;
            while (from < total) {
                int to = Math.min(from + 1000, total);
                publishDbBatch(new ArrayList<>(all.subList(from, to)));
                from = to;
            }
        }

        // Claims a slot, tags the slice as a DB batch (type 2), and publishes;
        // publish sits in finally so a claimed sequence is never left dangling.
        private void publishDbBatch(List<CSVRecord> batch) {
            long seq = ringBuffer.next();
            try {
                DataEvent dbEvent = ringBuffer.get(seq);
                dbEvent.setRecords(batch);
                dbEvent.setBatchType(2);
                dbEvent.setBatchSize(batch.size());
                logger.debug("发布DB批次: {} 条记录", batch.size());
            } finally {
                ringBuffer.publish(seq);
            }
        }
    }

    /**
     * Terminal pipeline stage: consumes DB batches (batchType 2) and performs the
     * (currently simulated) database insert, then clears the event slot.
     */
    public static class DbInsertHandler implements EventHandler<DataEvent> {
        @Override
        public void onEvent(DataEvent event, long sequence, boolean endOfBatch) {
            // Only DB-sized batches are inserted; other event types are ignored.
            if (event.getBatchType() != 2) {
                return;
            }

            List<CSVRecord> batch = event.getRecords();
            try {
                // Simulated insert — a real JDBC batch insert would go here.
                logger.info("正在插入 {} 条记录到数据库", batch.size());

                // Drop the record references so the reused slot holds no stale data.
                event.clear();
            } catch (Exception e) {
                logger.error("数据库插入错误", e);
            }
        }
    }

    // Centralized exception handler: logs failures and lets processing continue
    // (it does not rethrow, so a bad event will not halt the pipeline).
    public static class DataExceptionHandler implements ExceptionHandler<DataEvent> {
        /** Invoked when an event handler throws while processing the given sequence. */
        @Override
        public void handleEventException(Throwable ex, long sequence, DataEvent event) {
            logger.error("处理事件时出错 [序列:{}]: {}", sequence, ex.getMessage(), ex);
        }

        /** Invoked if a handler's onStart() throws during disruptor start-up. */
        @Override
        public void handleOnStartException(Throwable ex) {
            logger.error("启动时出错: {}", ex.getMessage(), ex);
        }

        /** Invoked if a handler's onShutdown() throws during disruptor shutdown. */
        @Override
        public void handleOnShutdownException(Throwable ex) {
            logger.error("关闭时出错: {}", ex.getMessage(), ex);
        }
    }

    /**
     * Demo entry point: wires a three-stage Disruptor pipeline
     * (trigger → CSV reader → batch splitter → DB inserter), fires one trigger
     * event, waits for the pipeline to drain, and logs the elapsed time.
     *
     * <p>Fixes: removed an unused (and never shut down) cached thread pool;
     * switched to {@link ProducerType#MULTI} because three threads publish to the
     * ring buffer (main, CsvFileReader, DbBatchProcessor) and single-producer
     * mode is a data race in that case; replaced the vacuous wait condition
     * {@code getCursor() < sequence} — which is false immediately after publish,
     * since the cursor IS the producer sequence — with a check that all gating
     * consumers have caught up with the cursor.
     *
     * @param args unused
     * @throws Exception if interrupted while waiting for the pipeline to drain
     */
    public static void main(String[] args) throws Exception {
        // 1. CSV input file (hard-coded demo path).
        Path csvFile = Paths.get("E:\\Develops\\demo001-100w.csv");
        logger.info("正在处理CSV文件: {}", csvFile.toAbsolutePath());

        TimeInterval interval = new TimeInterval();
        interval.start();

        // 2. Configure the Disruptor. Buffer size must be a power of two.
        int bufferSize = 1024;
        Disruptor<DataEvent> disruptor = new Disruptor<>(
                new DataEventFactory(),
                bufferSize,
                Executors.defaultThreadFactory(),
                ProducerType.MULTI,   // main + two handlers all publish to this buffer
                new BlockingWaitStrategy()
        );

        // 3. RingBuffer reference, shared with the publishing handlers.
        RingBuffer<DataEvent> ringBuffer = disruptor.getRingBuffer();

        // 4. Handler chain: reader -> splitter -> inserter.
        disruptor.handleEventsWith(new CsvFileReader(ringBuffer, csvFile))
                .then(new DbBatchProcessor(ringBuffer))
                .then(new DbInsertHandler());

        // 5. Log-and-continue exception handling for all handlers.
        disruptor.setDefaultExceptionHandler(new DataExceptionHandler());

        // 6. Start the consumer threads.
        disruptor.start();
        logger.info("Disruptor启动成功");

        // 7. Publish the single trigger event (batchType 0) that starts the read.
        long sequence = ringBuffer.next();
        try {
            ringBuffer.get(sequence).setBatchType(0);
        } finally {
            ringBuffer.publish(sequence);
        }

        // 8. Wait until every gating consumer has caught up with the producer
        //    cursor. The cursor keeps advancing while handlers republish batches,
        //    so this converges only when the whole pipeline is drained.
        while (ringBuffer.getMinimumGatingSequence() < ringBuffer.getCursor()) {
            Thread.sleep(100);
        }

        disruptor.shutdown();
        // Elapsed time at info to match the rest of the lifecycle logging.
        logger.info("处理时长：{}", DateUtil.formatBetween(interval.interval()));
        logger.info("Disruptor已关闭");
    }
}
