package com.meiya.hugegraph.mock;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * 交易数据消费者
 * 从Kafka消费交易数据并输出为CSV格式
 */
public class TransactionDataConsumer {
    private static final Logger logger = LoggerFactory.getLogger(TransactionDataConsumer.class);
    private static final String BOOTSTRAP_SERVERS = "192.168.182.128:9092";
    private static final String TOPIC = "transaction-data";
    private static final String GROUP_ID = "transaction-consumer-group";
    private static final String CSV_HEADER = "jykh,jyhm,jyje,jysj,dskh,dshm,ukey";
    // Incoming "jysj" timestamps are expected in this layout.
    private static final DateTimeFormatter INPUT_DATE_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    // FIX: this was identical to INPUT_DATE_FORMATTER, making formatDate() a no-op.
    // The documented contract is "yyyy-MM-dd HH:mm:ss" -> "yyyy/MM/dd HH:mm".
    private static final DateTimeFormatter OUTPUT_DATE_FORMATTER = DateTimeFormatter.ofPattern("yyyy/MM/dd HH:mm");
    // ObjectMapper is thread-safe and expensive to build; share one instance.
    private static final ObjectMapper objectMapper = new ObjectMapper();

    /** Utility class — not instantiable. */
    private TransactionDataConsumer() {
    }

    /**
     * Consumes messages from the Kafka topic and writes them out as CSV.
     *
     * @param outputFilePath path of the CSV file to write
     * @param maxRecords     maximum number of records to consume; -1 means consume indefinitely
     * @param timeoutMs      poll timeout in milliseconds
     * @return the number of records consumed (may be partial if an error occurred)
     */
    public static int consumeTransactionToCSV(String outputFilePath, int maxRecords, long timeoutMs) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, GROUP_ID);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"); // start from the newest messages
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");

        AtomicInteger recordCount = new AtomicInteger(0);

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList(TOPIC));
            logger.info("开始从Kafka主题 {} 消费数据...", TOPIC);

            // Ensure the output directory exists. FIX: getParent() is null for a
            // bare file name such as "out.csv"; guard against the resulting NPE.
            Path csvPath = Paths.get(outputFilePath);
            Path parent = csvPath.getParent();
            if (parent != null) {
                Files.createDirectories(parent);
            }

            // FIX: write UTF-8 explicitly — FileWriter used the platform default
            // charset, which corrupts non-ASCII field values on some systems.
            try (BufferedWriter writer = Files.newBufferedWriter(csvPath, StandardCharsets.UTF_8)) {
                // Write the CSV header first.
                writer.write(CSV_HEADER + "\n");

                while (maxRecords == -1 || recordCount.get() < maxRecords) {
                    ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(timeoutMs));

                    if (records.isEmpty()) {
                        logger.info("在{}毫秒内未收到新消息，继续等待...", timeoutMs);
                        continue;
                    }

                    for (ConsumerRecord<String, String> record : records) {
                        String line = processTransactionRecord(record.value());
                        writer.write(line + "\n");

                        int currentCount = recordCount.incrementAndGet();
                        if (currentCount % 100 == 0) {
                            logger.info("已处理 {} 条记录", currentCount);
                        }

                        if (maxRecords != -1 && currentCount >= maxRecords) {
                            // NOTE(review): remaining records of this poll are dropped,
                            // yet auto-commit may still commit their offsets — confirm
                            // this at-most-once behaviour is acceptable.
                            break;
                        }
                    }

                    // Flush after each batch so data reaches disk during long runs.
                    writer.flush();
                }
            }

            logger.info("CSV文件生成完成，共处理 {} 条记录，已写入: {}", recordCount.get(), outputFilePath);
            return recordCount.get();
        } catch (Exception e) {
            // Boundary catch: log and return however many records were written.
            logger.error("消费数据并转换为CSV时出错: {}", e.getMessage(), e);
            return recordCount.get();
        }
    }

    /**
     * Converts one JSON transaction record into a CSV line.
     *
     * <p>NOTE(review): values are joined with bare commas and not quoted/escaped;
     * a field containing a comma would corrupt the CSV row — confirm upstream
     * values can never contain commas.
     *
     * @param jsonRecord the transaction record as a JSON string
     * @return the CSV-formatted line
     * @throws IOException if the JSON cannot be parsed
     */
    private static String processTransactionRecord(String jsonRecord) throws IOException {
        JsonNode node = objectMapper.readTree(jsonRecord);

        // Extract the required fields. NOTE(review): "jyhm" is sourced from
        // "jyfhm" and "dskh" from "jydszkh" — presumably an intentional mapping
        // from the producer's schema; verify against the upstream payload.
        String jykh = getStringValue(node, "jykh");
        String jyhm = getStringValue(node, "jyfhm");
        String jyje = getStringValue(node, "jyje");
        String jysjStr = getStringValue(node, "jysj");
        String dskh = getStringValue(node, "jydszkh");
        String dshm = getStringValue(node, "dshm");
        String ukey = getStringValue(node, "ukey");

        // Normalize the transaction timestamp format.
        String formattedDate = formatDate(jysjStr);

        // Assemble the CSV row in header order.
        return String.join(",",
                jykh,
                jyhm,
                jyje,
                formattedDate,
                dskh,
                dshm,
                ukey);
    }

    /**
     * Reads a string field from the node; returns "0" when the field is
     * missing or JSON null (the placeholder the CSV schema expects).
     */
    private static String getStringValue(JsonNode node, String fieldName) {
        JsonNode field = node.get(fieldName);
        if (field == null || field.isNull()) {
            return "0";
        }
        return field.asText();
    }

    /**
     * Reformats a timestamp from {@code yyyy-MM-dd HH:mm:ss} to
     * {@code yyyy/MM/dd HH:mm}. Returns "0" for empty/placeholder input and
     * the original string if it cannot be parsed.
     */
    private static String formatDate(String dateStr) {
        if (dateStr == null || dateStr.isEmpty() || dateStr.equals("0")) {
            return "0";
        }

        try {
            LocalDateTime dateTime = LocalDateTime.parse(dateStr, INPUT_DATE_FORMATTER);
            return dateTime.format(OUTPUT_DATE_FORMATTER);
        } catch (Exception e) {
            // Best-effort: keep the raw value rather than dropping the record.
            logger.warn("无法解析日期: {}, 返回原始值", dateStr);
            return dateStr;
        }
    }

    /**
     * Entry point for standalone runs.
     * Args: [0] output CSV path, [1] max record count (-1 = unbounded).
     */
    public static void main(String[] args) {
        String outputPath = "loader/transaction_data.csv";
        int recordsToConsume = -1; // -1 means consume indefinitely
        long pollTimeout = 5000; // poll timeout in milliseconds

        // Parse command-line arguments.
        if (args.length > 0) {
            outputPath = args[0];
        }
        if (args.length > 1) {
            try {
                recordsToConsume = Integer.parseInt(args[1]);
            } catch (NumberFormatException e) {
                logger.warn("无效的记录数参数: {}, 使用默认值: {}", args[1], recordsToConsume);
            }
        }

        // Consume and generate the CSV file.
        int consumedRecords = consumeTransactionToCSV(outputPath, recordsToConsume, pollTimeout);
        logger.info("程序执行完成，共消费 {} 条记录", consumedRecords);
    }
}