package com.nh.log.service.impl;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.nh.common.pojo.Log;
import com.nh.log.client.ReceptionFeignClient;
import com.nh.log.service.LogService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.*;
import java.util.stream.Collectors;

import static com.alibaba.fastjson.JSON.toJSONString;

/**
 * @Author: LaoCao
 * @Date: 2023/3/16 8:35
 * @Version: 1.0
 **/
@Service
public class LogServiceImpl implements LogService {
    // Shared RNG used by logPrint() to pick the random "A"/"B" tag.
    private static final Random random = new Random();
    // Root directories of the two log outputs; one sub-folder per date (yyyyMMdd).
    private static final String AP_A = "D:\\log\\logA\\";
    private static final String AP_B = "D:\\log\\logB\\";
    // Publishes analysis results to the reception exchange (see logAnalyse).
    @Resource
    private RabbitTemplate rabbitTemplate;
    // Holds the daily id sequence ("log:<date>:incr:id") and the analysis
    // high-water marks ("log:id" / "log:name" hashes keyed by date).
    @Resource
    private RedisTemplate<String, Object> redisTemplate;
    // Pool used for all async work in this class.
    // NOTE(review): declared as AbstractExecutorService; the ExecutorService
    // interface would be the conventional field type — confirm the bean's
    // declared type before changing.
    @Resource
    private AbstractExecutorService threadPoolExecutor;
    // Remote client; only referenced from commented-out code below — presumably
    // kept for a future variant of the analysis. TODO confirm before removing.
    @Resource
    private ReceptionFeignClient receptionFeignClient;

    /**
     * Asynchronously writes one log line to file A and one to file B.
     * Both lines share the same daily sequence id (redis INCR on a
     * date-scoped key) and each carries an independently chosen random
     * "A"/"B" tag, so {@link #logAnalyse(String)} can later compare the
     * two tags per id.
     */
    @Override
    public void logPrint() {
        CompletableFuture.runAsync(() -> {
            long start = System.currentTimeMillis();
            String day = new SimpleDateFormat("yyyyMMdd").format(new Date());
            String idKey = "log:" + day + ":incr:id";
            // Daily auto-increment id shared by the A and B log lines.
            Long id = redisTemplate.opsForValue().increment(idKey);
            // Fix: set the TTL only when the key is first created. The
            // original re-set the TTL on every call, so under steady traffic
            // the key's expiry kept sliding forward and it never expired.
            if (id != null && id == 1L) {
                redisTemplate.expire(idKey, 1, TimeUnit.DAYS);
            }
            // Log line for file A.
            JSONObject lineA = new JSONObject();
            lineA.put("logId", day + id);
            lineA.put("tag", ThreadLocalRandom.current().nextInt(2) == 0 ? "A" : "B");
            LoggerFactory.getLogger("loggerA").info(lineA.toJSONString());
            // Log line for file B (tag drawn independently of file A's).
            JSONObject lineB = new JSONObject();
            lineB.put("logId", day + id);
            lineB.put("tag", ThreadLocalRandom.current().nextInt(2) == 0 ? "A" : "B");
            LoggerFactory.getLogger("loggerB").info(lineB.toJSONString());

            long end = System.currentTimeMillis();
            System.out.println("id:" + id + ",执行时间：" + (end - start));
        }, threadPoolExecutor);
    }

    /**
     * Analyses one day's logs: for every logId that appears in both an
     * A-side and a B-side file, records whether the two random tags agree,
     * then publishes the results to RabbitMQ as JSON in batches of at most
     * 10 000 entries.
     *
     * @param date folder name (yyyyMMdd) under the A/B log roots
     */
    @Override
    public void logAnalyse(String date) {
        File fileA = new File(AP_A + date);
        File fileB = new File(AP_B + date);
        // Load all not-yet-analysed log entries for the day, one map per file.
        List<ConcurrentMap<Long, Log>> logAConcurrentMapList = getLogMapList(fileA, date);
        if (logAConcurrentMapList == null || logAConcurrentMapList.isEmpty()) {
            return;
        }
        List<ConcurrentMap<Long, Log>> logBConcurrentMapList = getLogMapList(fileB, date);
        if (logBConcurrentMapList == null || logBConcurrentMapList.isEmpty()) {
            return;
        }
        // Matching is O(|A| * |B files|); run it off the caller thread.
        CompletableFuture.runAsync(() -> {
            long start = System.currentTimeMillis();
            List<Log> logList = new ArrayList<>();
            for (ConcurrentMap<Long, Log> logAMap : logAConcurrentMapList) {
                // A map element may be null when a file could not be read.
                if (logAMap == null) {
                    continue;
                }
                for (Map.Entry<Long, Log> entry : logAMap.entrySet()) {
                    Long logId = entry.getKey();
                    Log logA = entry.getValue();
                    for (ConcurrentMap<Long, Log> logBMap : logBConcurrentMapList) {
                        if (logBMap == null) {
                            continue;
                        }
                        Log logB = logBMap.get(logId);
                        // Fix: only matched pairs are collected. The original
                        // stream also added a null for every non-matching B
                        // map, so nulls ended up in the published JSON.
                        if (logB != null) {
                            boolean result = logA.getTag().equals(logB.getTag());
                            logList.add(new Log(logA.getLogId(), logA.getLogDateTime(), result));
                        }
                    }
                }
            }
            // Publish in batches of at most 10 000 entries per message.
            for (int from = 0; from < logList.size(); from += 10000) {
                int to = Math.min(from + 10000, logList.size());
                String logJsonStr = toJSONString(logList.subList(from, to));
                rabbitTemplate.convertAndSend(
                        "EXCHANGE_RECEPTION",
                        "ROUTING_KEY_RECEPTION",
                        logJsonStr);
            }
            long end = System.currentTimeMillis();
            System.out.println("分析结果执行时间：" + (end - start));
        }, threadPoolExecutor);
    }

    /**
     * Loads the not-yet-analysed log entries under {@code file} for the given
     * date, one map (logId -> Log) per log file, parsed in parallel on the
     * pool. Updates two redis caches afterwards: "log:name" remembers the
     * newest fully-written file already analysed, and "log:id" remembers the
     * highest logId analysed so far.
     *
     * @param file log directory for one side (A or B) and one date
     * @param date the date being analysed (yyyyMMdd), used as the cache key
     * @return one map per file, or null when there is nothing new to analyse
     */
    private List<ConcurrentMap<Long, Log>> getLogMapList(File file, String date) {
        File[] files = file.listFiles();
        if (files == null || files.length == 0) {
            return null;
        }
        // Skip files a previous run already analysed: the cached file name's
        // leading digit is the highest analysed file number.
        if (files.length > 1) {
            String logName = (String) redisTemplate.opsForHash().get("log:name", date);
            if (logName != null) {
                int cacheLogNum = Integer.parseInt(logName.substring(0, 1));
                files = Arrays.stream(files)
                        .filter(preFile ->
                                Integer.parseInt(preFile.getName().substring(0, 1)) > cacheLogNum)
                        .toArray(File[]::new);
            }
        }
        // Fix: when the filter removed every file, the original crashed on
        // Optional.get() over an empty stream; bail out instead.
        if (files.length == 0) {
            return null;
        }
        Arrays.sort(files);
        // Highest logId analysed so far; entries at or below it are skipped.
        Long lastLogId = (Long) redisTemplate.opsForHash().get("log:id", date);
        // Fix: collect results from the futures instead of having each task
        // add into a shared unsynchronized ArrayList (a data race in the
        // original). Order of the result list follows the sorted file order.
        List<CompletableFuture<ConcurrentMap<Long, Log>>> futures = Arrays.stream(files)
                .map(preFile -> CompletableFuture.supplyAsync(
                        () -> parseLogFile(preFile, lastLogId), threadPoolExecutor))
                .collect(Collectors.toList());
        List<ConcurrentMap<Long, Log>> concurrentMapList = futures.stream()
                .map(CompletableFuture::join)
                .collect(Collectors.toList());
        // New high-water mark: the largest id parsed (all parsed ids exceed
        // lastLogId by construction), or the previous mark when nothing new
        // was found.
        long maxLogId = concurrentMapList.stream()
                .flatMap(logMap -> logMap.keySet().stream())
                .mapToLong(Long::longValue)
                .max()
                .orElse(lastLogId == null ? 0L : lastLogId);
        // Cache the second-to-last file name: the last file may still be
        // appended to, so it is deliberately not marked as fully analysed.
        if (files.length > 1) {
            redisTemplate.opsForHash().put("log:name", date, files[files.length - 2].getName());
        }
        redisTemplate.opsForHash().put("log:id", date, maxLogId);
        return concurrentMapList;
    }

    /**
     * Parses one log file into a map of logId -> Log, keeping only entries
     * whose id is greater than {@code lastLogId}. Returns an empty map when
     * the file cannot be read (the original produced a null list element
     * here, which made the downstream analysis NPE).
     */
    private ConcurrentMap<Long, Log> parseLogFile(File preFile, Long lastLogId) {
        String[] logArrays = getLogArray(preFile);
        if (logArrays == null) {
            return new ConcurrentHashMap<>();
        }
        List<Log> logs = Arrays.stream(logArrays)
                .filter(logStr -> logStr.contains("\"logId\""))
                .map(logStr -> {
                    // Line layout: "<date> <time> ... <json payload>" — the
                    // payload is the last space-separated token.
                    String[] split = logStr.split(" ");
                    Log log = JSON.parseObject(split[split.length - 1], Log.class);
                    if (lastLogId != null && log.getLogId() <= lastLogId) {
                        return null; // already analysed in a previous run
                    }
                    log.setLogDateTime(split[0] + " " + split[1]);
                    return log;
                })
                .filter(Objects::nonNull)
                .collect(Collectors.toList());
        return logs.stream().collect(Collectors.toConcurrentMap(Log::getLogId, log -> log));
    }

    /**
     * Reads a log file into an array of its lines.
     *
     * @param file the log file to read
     * @return one array element per line, or null when the file cannot be read
     */
    private String[] getLogArray(File file) {
        // Fix: try-with-resources — the original never closed the reader,
        // leaking a file handle per call.
        try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
            List<String> lines = new ArrayList<>();
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                lines.add(line);
            }
            return lines.toArray(new String[0]);
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
    }
//    static int i = 0;
//    private List<ConcurrentMap<Long, Log>> getLogMapList2(File file, String date) {
//        //获取目录下的所有日志文件
//        File[] files = file.listFiles();
//        if (files == null) {
//            return null;
//        }
//        Arrays.sort(files);
//        List<File> fileList = Arrays.stream(files).collect(Collectors.toList());
//        //调用远程接口获取该日期的所有已分析的logId
//        List<Long> logIdList = receptionFeignClient.findAllLogId(date);
//        //如果logId数量大于10000，将所有logId按照每10000个为一个集合进行分批
//        List<List<Long>> batchLogIdList = new ArrayList<>();
//        if (logIdList.size() > 10000) {
//            int maxNum = logIdList.size();
//            int num = 0;
//            while (maxNum > 0) {
//                List<Long> logIds;
//                if (maxNum >= 10000) {
//                    logIds = logIdList.subList(num, num += 10000);
//                    maxNum -= 10000;
//                } else {
//                    logIds = logIdList.subList(logIdList.size() - maxNum, logIdList.size());
//                    maxNum -= logIdList.size();
//                }
//                batchLogIdList.add(logIds);
//            }
//        }
//        //通过异步线程过滤出日志文件中未分析的数据
//        return fileList.stream().map(file1 -> {
//            System.out.println("日志过滤开启:{" + file1.getName() + "}");
//            try {
//                //获取读取到的数据
//                BufferedReader bufferedReader = new BufferedReader(new FileReader(file1));
//                String line;
//                StringBuilder stringBuilder = new StringBuilder();
//                while (((line = bufferedReader.readLine())) != null) {
//                    stringBuilder.append(line).append("\n");
//                }
//                //拆分读取到的日志数据
//                String logString = stringBuilder.toString();
//                String[] logArray = logString.split("\n");
//                //将logArray中的日志数据组装成map集合
//                ConcurrentMap<Long, Log> logMap = new ConcurrentHashMap<>();
//                Arrays.stream(logArray).forEachOrdered(logStr -> {
//                    if (logStr.contains("\"logId\"")) {
//                        String[] s2 = logStr.split(" ");
//                        Log log = JSONObject.parseObject(s2[s2.length - 1], Log.class);
//                        //判断该数据是否分析过
//                        if (batchLogIdList.size() > 0) {
////                            CompletableFuture[] completableFutures = new CompletableFuture[batchLogIdList.size()];
////                            batchLogIdList.forEach(logIds ->{
////
////                                CompletableFuture completableFuture = CompletableFuture.supplyAsync(() -> {
////                                    long start = System.currentTimeMillis();
////                                        if (!logIds.contains(log.getLogId())) {
////                                            log.setLogDateTime(s2[0] + " " + s2[1]);
////                                            logMap.put(log.getLogId(), log);
////                                        }
////                                        long end = System.currentTimeMillis();
//////                                        System.out.println("分析完毕,耗时：" + (end - start));
////                                        return "OK";
////                                }, threadPoolExecutor);
////                                completableFutures[i++] = completableFuture;
////                            });
////                            CompletableFuture.allOf(completableFutures).join();
////                            i = 0
//                            CompletableFuture[] completableFutures = batchLogIdList
//                                    .stream().map(logIds -> CompletableFuture.supplyAsync(() -> {
////                                        long start = System.currentTimeMillis();
//                                        if (logIds.contains(log.getLogId())) {
//                                            log.setLogDateTime(s2[0] + " " + s2[1]);
//                                            logMap.put(log.getLogId(), log);
//                                        }
////                                        long end = System.currentTimeMillis();
////                                        System.out.println("分析完毕,耗时：" + (end - start));
//                                        return "ok";
//                                    }, threadPoolExecutor)).toArray(CompletableFuture[]::new);
//                            CompletableFuture.allOf(completableFutures).join();
//                        } else {
//                            if (!logIdList.contains(log.getLogId())) {
//                                log.setLogDateTime(s2[0] + " " + s2[1]);
//                                logMap.put(log.getLogId(), log);
//                            }
//                        }
//                    }
//                });
//                System.out.println("日志过滤结束:{" + file1.getName() + "}");
//               /* for (String logStr : logArray) {
//                    if (logStr.contains("\"logId\"")) {
//                        String[] s2 = logStr.split(" ");
//                        Log log = JSONObject.parseObject(s2[s2.length - 1], Log.class);
//                        //判断该数据是否分析过
//                        if (!logIdList.contains(log.getLogId())){
//                            log.setLogDateTime(s2[0] + " " + s2[1]);
//                            logMap.put(log.getLogId(), log);
//                        }
//                    }
//                }*/
//                return logMap;
//            } catch (IOException e) {
//                e.printStackTrace();
//            }
//            return null;
//        }).collect(Collectors.toList());
//    }
}
