package org.limited.memory;

import cn.hutool.core.io.FileUtil;
import cn.hutool.core.io.LineHandler;
import cn.hutool.core.io.file.FileAppender;
import cn.hutool.core.io.file.FileReader;
import com.alibaba.fastjson.JSONObject;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.limited.memory.utils.Constant;
import org.limited.memory.utils.MemoryInfoUtil;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

@Slf4j
public class MapReduceStatisticalDuplication {

    /**
     * Entry point: prints JVM memory info for diagnostics, then runs the
     * map-reduce average computation over the configured big file and logs
     * the result.
     */
    public static void main(String[] args) {
        // print JVM memory info so runs under limited memory are observable
        MemoryInfoUtil.memoryInfo();

        // compute the average over all counted characters
        Double avg = MapReduce.avg();
        log.info("avg:{}", avg);
    }

    public static class MapReduce {
        public static class Map {
            /**
             * Map phase: splits the configured big file into chunks, counts each
             * chunk in parallel, and returns the per-chunk result files.
             *
             * @return the ".temp.result" files produced in the work space
             */
            public static File[] exec() {
                // fresh directory for this run's intermediate results
                final String workDir = createWorkSpace();
                // cut the big input into fixed-size pieces
                final File[] parts = splitFile(Constant.bigFilePath, Constant.ten_MB);

                log.info("small files:{}",
                        Arrays.stream(parts).map(File::getAbsolutePath).collect(Collectors.toList()));
                // count characters in every piece, one result file per piece
                smallFileStatisticalDuplication(parts, workDir);

                return FileUtil.ls(workDir);
            }

            /**
             * Creates a timestamp-named directory under the map work root.
             *
             * @return path of the newly created work directory
             */
            private static String createWorkSpace() {
                final String dir = Constant.mapWorkFilePath + System.currentTimeMillis();
                FileUtil.mkdir(dir);
                return dir;
            }

            /**
             * Creates a timestamp-named directory under the map temp root
             * (holds the split ".part" files).
             *
             * @return path of the newly created temp directory
             */
            private static String createTempSpace() {
                final String dir = Constant.mapTempFilePath + System.currentTimeMillis();
                FileUtil.mkdir(dir);
                return dir;
            }


            /**
             * Counts every small file in parallel, writing one result file per
             * input into the given work space.
             *
             * @param smallFiles the split ".part" files to process
             * @param workSpace  directory receiving the ".temp.result" files
             */
            private static void smallFileStatisticalDuplication(File[] smallFiles, String workSpace) {
                Arrays.stream(smallFiles)
                        .parallel()
                        .forEach(part -> statisticalDuplication(part, workSpace));
            }

            /**
             * Tallies digit characters in one small file and appends the per-digit
             * counts to "&lt;name&gt;.temp.result" in the work space, one line per
             * bucket formatted as "&lt;digit&gt;&lt;Separator&gt;&lt;count&gt;".
             *
             * NOTE(review): assumes every character is an ASCII digit — any other
             * character makes (c - '0') fall outside the 10 buckets and throws
             * ArrayIndexOutOfBoundsException, which the catch below turns into
             * "skip this whole file". Confirm the input really is digits-only.
             */
            private static void statisticalDuplication(File smallFile, String workSpace) {
                // one bucket per digit 0-9; bucket index = character - '0'
                int[] buckets = new int[10];
                // read the file line by line, counting each character's occurrences
                FileReader fileReader = new FileReader(smallFile);

                LineHandler handler = line -> {
                    if (StringUtils.isNotBlank(line)) {
                        char[] charArray = line.toCharArray();
                        for (int i = 0; i < charArray.length; i++) {
                            buckets[charArray[i] - '0']++;
                        }
                    }
                };
                try {
                    fileReader.readLines(handler);
                } catch (Throwable e) {
                    // Throwable (not Exception): bucket-index errors and OOM alike are
                    // logged with a memory snapshot, and this file's counts are dropped
                    MemoryInfoUtil.memoryInfo();
                    log.error("read lines error:{}", e.getMessage(), e);
                    return;
                }

                // write the 10 "digit<sep>count" lines; FileAppender buffers 10 lines,
                // so the explicit flush below persists everything
                FileAppender appender = new FileAppender(new File(workSpace, smallFile.getName() + ".temp.result"), 10, true);
                for (int i = 0; i < buckets.length; i++) {
                    appender.append(i + Constant.Separator + buckets[i]);
                }
                appender.flush();
            }

            /**
             * Splits {@code sourceFile} into chunks of at most {@code chunkSize}
             * bytes, written in parallel into a fresh temp directory.
             *
             * Uses {@code FileChannel#transferTo} instead of reading each chunk
             * into a {@code byte[]}: the original allocated a full chunk (e.g.
             * 10MB) on the heap per parallel worker, which defeats the point of
             * running with limited memory; the channel transfer needs no
             * chunk-sized buffer.
             *
             * @param sourceFile path of the large input file
             * @param chunkSize  maximum size of each part in bytes
             * @return the ".part" files created in the temp directory
             */
            private static File[] splitFile(String sourceFile, long chunkSize) {
                String tempSpace = createTempSpace();
                File largeFile = new File(sourceFile);
                long totalSize = largeFile.length();
                log.info("file size: {}MB", totalSize / 1024 / 1024);

                int numberOfChunks = (int) Math.ceil((double) totalSize / chunkSize);
                // chunks are independent byte ranges, so they can be copied in parallel
                IntStream.range(0, numberOfChunks).parallel().forEach(chunkNumber -> {
                    long startByte = chunkNumber * chunkSize;
                    long endByte = Math.min(startByte + chunkSize, totalSize);
                    File smallFile = new File(tempSpace, largeFile.getName() + "_" + chunkNumber + ".part");
                    try (RandomAccessFile raf = new RandomAccessFile(largeFile, "r");
                         FileOutputStream fos = new FileOutputStream(smallFile)) {
                        // loop because transferTo may move fewer bytes than requested
                        long position = startByte;
                        long remaining = endByte - startByte;
                        while (remaining > 0) {
                            long moved = raf.getChannel().transferTo(position, remaining, fos.getChannel());
                            if (moved <= 0) {
                                break;
                            }
                            position += moved;
                            remaining -= moved;
                        }
                    } catch (IOException e) {
                        log.error("splitFile error,smallFile:{},msg:{}", smallFile.getAbsolutePath(), e.getMessage(), e);
                    }
                });
                return FileUtil.ls(tempSpace);
            }
        }

        /**
         * Pair of a word (here a single digit rendered as a String) and its
         * occurrence count. Lombok generates getters/setters/equals/hashCode
         * and the all-args constructor.
         */
        @Data
        @AllArgsConstructor
        public static class WordAndCount {
            // the counted token, e.g. "7"
            private String word;
            // number of occurrences of that token
            private Integer count;
        }

        /**
         * A word, its occurrence count, and the product of the word's numeric
         * value and that count — the per-line contribution to the weighted
         * average computed by {@link Reduce#avg(File[])}.
         */
        @Data
        @AllArgsConstructor
        public static class WordAndProduct {
            // the counted token, e.g. "7"
            private String word;
            // number of occurrences of that token
            private Integer count;
            // numericValue(word) * count, kept as long to avoid int overflow in sums
            private Long product;
        }

        public static class Reduce {


            /**
             * Aggregates the per-file result lines and returns the k words with
             * the highest summed counts.
             *
             * NOTE(review): each file contributes only its first k lines (see the
             * single-file overload), but the temp result files are written in
             * bucket order, not sorted by count — for k &lt; bucket count this
             * truncation can drop high-count lines. Confirm this is intended.
             *
             * @param tempResultFiles "word&lt;sep&gt;count" files from the map phase
             * @param k               how many top entries to return
             * @return up to k (word, total count) pairs, highest count first
             */
            public static List<WordAndCount> topK(File[] tempResultFiles, int k) {
                // gather the first k raw lines of every file, in parallel
                List<String> rawLines = Arrays.stream(tempResultFiles)
                        .parallel()
                        .map(resultFile -> topK(resultFile, k))
                        .flatMap(Collection::stream)
                        .collect(Collectors.toList());

                // parse "word<sep>count" lines, dropping malformed ones
                List<WordAndCount> parsed = new ArrayList<>();
                for (String rawLine : rawLines) {
                    String[] parts = rawLine.split(Constant.Separator);
                    if (parts.length == 2) {
                        parsed.add(new WordAndCount(parts[0], Integer.parseInt(parts[1])));
                    }
                }

                // sum counts per word, then keep the k largest totals
                return parsed.stream()
                        .collect(Collectors.groupingBy(WordAndCount::getWord, Collectors.summingInt(WordAndCount::getCount)))
                        .entrySet()
                        .stream()
                        .sorted(java.util.Map.Entry.<String, Integer>comparingByValue().reversed())
                        .limit(k)
                        .map(entry -> new WordAndCount(entry.getKey(), entry.getValue()))
                        .collect(Collectors.toList());
            }


            /**
             * Reads up to the first {@code k} lines of one temp result file.
             *
             * Fix: the original added {@code reader.readLine()} results blindly,
             * so a file with fewer than k lines contributed {@code null} entries
             * that later caused a NullPointerException when the aggregator split
             * them. Reading now stops at end-of-file; nulls are never returned.
             *
             * @param tempResultFile a "word&lt;sep&gt;count" file from the map phase
             * @param k              maximum number of lines to read
             * @return at most k non-null lines, in file order
             */
            public static List<String> topK(File tempResultFile, int k) {
                FileReader.ReaderHandler<List<String>> handler = reader -> {
                    List<String> result = new ArrayList<>(k);
                    for (int i = 0; i < k; i++) {
                        String line = reader.readLine();
                        if (line == null) {
                            // end of file reached before k lines were read
                            break;
                        }
                        result.add(line);
                    }
                    return result;
                };
                FileReader fileReader = new FileReader(tempResultFile);
                return fileReader.read(handler);
            }

            /**
             * Computes the weighted average of all counted digits:
             * sum(digit * count) / sum(count) over every temp result line.
             *
             * Fixes:
             * - the original returned {@code (double) (productSum / countSum)},
             *   performing integer division BEFORE the cast and truncating the
             *   fractional part; the cast now applies to the dividend only;
             * - the per-line product was computed as int * int and only then cast
             *   to long, risking overflow; the multiplication is now done in long;
             * - an empty result set no longer divides by zero — NaN is returned.
             *
             * @param tempResult "word&lt;sep&gt;count" files from the map phase
             * @return the average digit value, or NaN when nothing was counted
             */
            public static Double avg(File[] tempResult) {
                List<WordAndProduct> wordAndProductList = Arrays.stream(tempResult)
                        .parallel()
                        .map(resultFile -> new FileReader(resultFile).readLines())
                        .flatMap(Collection::stream)
                        .map(line -> {
                            String[] parts = line.split(Constant.Separator);
                            if (parts.length == 2) {
                                int word = Integer.parseInt(parts[0]);
                                int count = Integer.parseInt(parts[1]);
                                // multiply in long to avoid int overflow
                                return new WordAndProduct(parts[0], count, (long) word * count);
                            }
                            // malformed line — dropped by the filter below
                            return null;
                        })
                        .filter(Objects::nonNull)
                        .collect(Collectors.toList());
                log.info("wordAndProductList:{}", JSONObject.toJSONString(wordAndProductList));
                long countSum = wordAndProductList.stream().mapToLong(WordAndProduct::getCount).sum();
                long productSum = wordAndProductList.stream().mapToLong(WordAndProduct::getProduct).sum();
                log.info("countSum:{},productSum:{}", countSum, productSum);
                if (countSum == 0) {
                    // no data at all — avoid ArithmeticException
                    return Double.NaN;
                }
                // cast before dividing so the fractional part survives
                return (double) productSum / countSum;
            }
        }

        /**
         * Runs the full map phase, then reduces the intermediate files to the
         * k most frequent words.
         *
         * @param k number of top entries to return
         * @return up to k (word, total count) pairs, highest count first
         */
        public static List<WordAndCount> topK(int k) {
            return Reduce.topK(Map.exec(), k);
        }

        /**
         * Runs the full map phase, then reduces the intermediate files to the
         * overall average value.
         *
         * @return the average computed by {@link Reduce#avg(File[])}
         */
        public static Double avg() {
            return Reduce.avg(Map.exec());
        }
    }

}
