package com.util;

import org.apache.commons.io.FileUtils;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;

public class FileToKafkaLaunch {

    // Single-process, multi-file, multi-threaded loader: scans a directory tree,
    // stages data files whose type has a configured topic into a "move" area,
    // and streams their contents to Kafka. Messages carry the machine code and
    // file name so downstream consumers can trace each record to its source file.

    private static final Logger logger = LoggerFactory.getLogger(FileToKafkaLaunch.class);

    // File name -> absolute path for every regular file found under fccPath,
    // sorted by name. Populated once by dealSonFile() before workers start.
    private static final TreeMap<String, String> dealMap = new TreeMap<>();

    public static void main(String[] args) throws InterruptedException {

        long startTime = System.currentTimeMillis();

        // Allow overriding the config location on the command line; fall back to
        // the original hard-coded (Windows-style) development path.
        String configPath = (args != null && args.length > 0)
                ? args[0]
                : "src\\main\\resources\\config.properties";

        // Load the configuration file.
        Properties prop = new Properties();
        try (InputStream configInputStream = new FileInputStream(configPath)) {
            prop.load(configInputStream);
        } catch (IOException ex) {
            // BUGFIX: the original only printed the stack trace and carried on
            // with empty settings, which then failed later in parseConfig().
            logger.error("无法读取配置文件: " + configPath, ex);
            return;
        }

        String fccPath = prop.getProperty("fccPath");
        String fileTypeTopic = prop.getProperty("fileTypeTopic");
        String fileEncoding = prop.getProperty("fileEncoding");
        String fileHead = prop.getProperty("fileHead");
        String fileNum = prop.getProperty("fileNum");
        String kafkaAddress = prop.getProperty("kafkaAddress");
        String kafkaAsks = prop.getProperty("kafkaAsks");
        String kafkaRetry = prop.getProperty("kafkaRetry");
        String rsync = prop.getProperty("rsync");
        String rsyncTopic = prop.getProperty("rsyncTopic");
        String code = prop.getProperty("code"); // machine identification code attached to every Kafka message

        // Every key present in the file must have a non-empty value.
        for (Object key : prop.keySet()) {
            String value = prop.getProperty(key.toString());
            if (value == null || value.isEmpty()) {
                logger.error("配置不正确");
                return;
            }
        }

        // Optional cross-machine sync: listen for "file done" notifications and
        // move the matching local copies out of the data area.
        if ("true".equals(rsync)) {
            startRsyncConsumer(kafkaAddress, code, rsyncTopic);
        }

        logger.info(prop.toString());

        // Parse "type@@value,type@@value" settings.
        Map<String, String> fileTypeTopicMap = parseConfig(fileTypeTopic);   // file type -> Kafka topic
        Set<String> fileTypeTopicSet = fileTypeTopicMap.keySet();
        Map<String, String> fileEncodingMap = parseConfig(fileEncoding);     // file type -> charset
        Map<String, String> fileHeadMap = parseConfig(fileHead);             // file type -> header line

        // Recursively collect all files under the configured root.
        File dirFile = new File(fccPath);
        if (dirFile.isFile()) {
            return; // fccPath must be a directory
        }
        dealSonFile(dirFile);

        // Stage every file whose type has a configured topic: move it from the
        // "data" area to the "move" area and remember its new location.
        // BUGFIX: the original negated the type check (contradicting its own
        // comment) and never populated dealMoveMap, so no send task was ever
        // submitted below.
        Map<String, String> dealMoveMap = new TreeMap<>();
        dealMap.forEach((k, v) -> {
            if (fileTypeTopicSet.contains(fileType(k)) && v.contains("data")) {
                String targetFile = v.replace("data", "move");
                try {
                    FileUtils.moveFile(new File(v), new File(targetFile));
                    dealMoveMap.put(k, targetFile);
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        });

        // Build the shared Kafka producer pool (one producer slot per worker).
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaAddress);
        properties.put(ProducerConfig.ACKS_CONFIG, kafkaAsks);
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        if ("true".equals(kafkaRetry)) {
            properties.put(ProducerConfig.RETRIES_CONFIG, 5);
            properties.put(ProducerConfig.RETRY_BACKOFF_MS_CONFIG, 200);
            properties.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, 120000);
        }
        int poolSize = Integer.parseInt(fileNum);
        KafkaProducerPool kafkaProducerPool = KafkaProducerPool.getInstance(properties, poolSize);

        // One worker thread per configured parallel file.
        ExecutorService executorService = Executors.newFixedThreadPool(poolSize);

        // Submit one send task per staged file.
        String codeStr = code;
        String rsyncTopicStr = rsyncTopic;
        dealMoveMap.forEach((k, v) -> {
            String type = fileType(k);
            String fileEncodingStr = fileEncodingMap.getOrDefault(type, "UTF-8");
            String headLineStr = fileHeadMap.getOrDefault(type, "");
            String kafkaTopicStr = fileTypeTopicMap.get(type);

            executorService.submit(new KafkaDataSendTaskThread(codeStr, v, headLineStr, fileEncodingStr, kafkaProducerPool, kafkaTopicStr, rsyncTopicStr));
        });

        // BUGFIX: awaitTermination never returns until shutdown() has been
        // called; with an infinite timeout the original blocked forever.
        executorService.shutdown();
        try {
            if (!executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS)) {
                logger.error("ExecutorService did not terminate!");
            }
        } catch (InterruptedException e) {
            // Interrupted while waiting: stop outstanding tasks and preserve
            // the interrupt flag for callers.
            executorService.shutdownNow();
            Thread.currentThread().interrupt();
        }

        // All tasks finished; release pooled producers.
        logger.info("All tasks are finished.");
        kafkaProducerPool.close();

        long endTime = System.currentTimeMillis();
        logger.info("结束时间为：" + endTime + "，花费时间为：" + (endTime - startTime) + "ms");
    }

    /**
     * Starts a daemon thread that consumes sync notifications from {@code topic}
     * and moves the corresponding local files from the "data" area to the
     * "done" area. Messages originating from this machine (same {@code code})
     * are skipped.
     *
     * BUGFIX vs. original: the thread is a daemon (so the JVM can exit once all
     * send tasks are finished) and a single failed file move is logged instead
     * of killing the whole sync loop.
     */
    private static void startRsyncConsumer(String kafkaAddress, String code, String topic) {
        Thread consumerThread = new Thread(() -> {
            // Consumer configuration.
            Properties props = new Properties();
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaAddress); // Kafka broker addresses
            props.put(ConsumerConfig.GROUP_ID_CONFIG, code);                  // one consumer group per machine
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
            // Start from the earliest message when no committed offset exists.
            props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

            // try-with-resources closes the consumer on any exit path.
            try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
                consumer.subscribe(Collections.singletonList(topic));

                while (true) {
                    ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                    for (ConsumerRecord<String, String> record : records) {
                        // Fields joined by "@@": [0] is the sender's machine
                        // code, [2] a file path in the "move" area; the middle
                        // field is unused here — presumably the file name
                        // (TODO confirm against the producer side).
                        String[] split = record.value().split("@@");
                        if (split.length < 3 || code.equals(split[0])) {
                            continue; // malformed message, or it originated here
                        }
                        File file = new File(split[2].replace("move", "data"));
                        if (!file.exists()) {
                            continue; // nothing local to move
                        }
                        try {
                            FileUtils.moveFile(file, new File(split[2].replace("move", "done")));
                        } catch (IOException e) {
                            logger.error("移动文件失败: " + file, e);
                        }
                    }
                }
            }
        });
        consumerThread.setDaemon(true);
        consumerThread.start();
    }

    /**
     * Derives the file-type key from a file name: everything before the last
     * underscore, or the whole name when there is no underscore.
     * (The original threw StringIndexOutOfBoundsException on names without "_".)
     */
    private static String fileType(String fileName) {
        int idx = fileName.lastIndexOf('_');
        return idx >= 0 ? fileName.substring(0, idx) : fileName;
    }

    /**
     * Recursively collects every regular file under {@code dirFile} into
     * {@link #dealMap} (name -> absolute path).
     */
    private static void dealSonFile(File dirFile) {

        File[] files = dirFile.listFiles();
        if (files == null) {
            return; // not a directory, or an I/O error occurred
        }
        for (File file : files) {
            if (file.isFile()) {
                dealMap.put(file.getName(), file.getAbsolutePath());
            } else {
                dealSonFile(file);
            }
        }

    }

    /**
     * Parses a "key@@value,key@@value" style setting into a map.
     * Malformed entries (missing "@@") are skipped with a warning instead of
     * throwing ArrayIndexOutOfBoundsException.
     */
    private static Map<String, String> parseConfig(String config) {

        Map<String, String> map = new HashMap<>();
        for (String entry : config.split(",")) {
            String[] parts = entry.split("@@");
            if (parts.length >= 2) {
                map.put(parts[0], parts[1]);
            } else {
                logger.warn("忽略格式不正确的配置项: " + entry);
            }
        }
        return map;

    }

}
