package com.le.tester.imooc.oldfairy.coding.buffer;

import org.junit.Test;
import org.redisson.misc.Hash;

import java.io.*;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.*;

/**
 * createTime: 2023/5/16 14:57
 *
 * <p>Counts word frequencies in a text file, once single-threaded and once with a
 * {@link ForkJoinPool}, to measure where the bottleneck actually is instead of
 * guessing at it.
 */
public class WordCount {

    // File whose words are counted; point this at a local test file before running.
    private String filePath = "E:\\Desktop\\test.txt";

    // Shared pool used by the multi-threaded variant in run().
    public ForkJoinPool forkJoinPool = new ForkJoinPool();

    /**
     * Single-threaded baseline: stream the file through a 4 KiB buffer, count each
     * chunk, merge the per-chunk counts into one map, and print the elapsed time.
     *
     * @throws IOException if the file cannot be opened or read
     */
    @Test
    public void compareWithSingle() throws IOException {
        HashMap<String, Integer> map = new HashMap<>();
        long startTime = System.currentTimeMillis();

        // try-with-resources: the stream was previously never closed (resource leak).
        try (BufferedInputStream buffIn = new BufferedInputStream(new FileInputStream(filePath))) {
            // A larger buffer means fewer read() calls / merge rounds.
            byte[] buffer = new byte[4 * 1024];
            int len;
            while ((len = buffIn.read(buffer)) != -1) {
                // Decode only the `len` bytes actually read (no intermediate copy needed).
                // An explicit charset keeps this consistent with the US_ASCII decoding in
                // CountTask; previously this used the platform default charset.
                // NOTE(review): a word straddling two reads is still split into two
                // fragments — same limitation as the original; acceptable for a
                // throughput benchmark, not for exact counts.
                String str = new String(buffer, 0, len, StandardCharsets.US_ASCII);
                HashMap<String, Integer> chunkCounts = countByString(str);
                for (Map.Entry<String, Integer> entry : chunkCounts.entrySet()) {
                    incKey(entry.getKey(), map, entry.getValue());
                }
            }
        }

        System.out.println("time:" + (System.currentTimeMillis() - startTime) + "ms");
        System.out.println(map.get("ababb"));
        System.out.println(map.size());
    }

    /**
     * Splits {@code str} on whitespace (the {@link StringTokenizer} default
     * delimiters) and returns a map of word -&gt; occurrence count.
     */
    private HashMap<String, Integer> countByString(String str) {
        HashMap<String, Integer> resultMap = new HashMap<>();
        StringTokenizer tokenizer = new StringTokenizer(str);
        while (tokenizer.hasMoreTokens()) {
            incKey(tokenizer.nextToken(), resultMap, 1);
        }
        return resultMap;
    }

    /**
     * Adds {@code n} to the count stored under {@code key}, creating the entry if
     * absent. Map.merge replaces the original containsKey/get/put sequence
     * (one hash lookup instead of up to three).
     */
    private void incKey(String key, HashMap<String, Integer> resultMap, Integer n) {
        resultMap.merge(key, n, Integer::sum);
    }

    /** Counts the words of one [start, end) byte range of the file via a memory-mapped buffer. */
    class CountTask implements Callable<HashMap<String, Integer>> {

        public final long start;

        public final long end;

        public final String fileName;

        public CountTask(String fileName, long start, long end) {
            this.start = start;
            this.end = end;
            this.fileName = fileName;
        }

        @Override
        public HashMap<String, Integer> call() throws Exception {
            // try-with-resources: the original leaked the RandomAccessFile/channel on
            // every task. "r" is sufficient for a READ_ONLY mapping; the original
            // opened "rw" without ever writing.
            try (RandomAccessFile file = new RandomAccessFile(this.fileName, "r");
                 FileChannel fileChannel = file.getChannel()) {
                // Map the chunk directly into user space (saves one kernel->user copy).
                MappedByteBuffer mBuf = fileChannel.map(
                        FileChannel.MapMode.READ_ONLY,
                        this.start,
                        this.end - this.start);
                String str = StandardCharsets.US_ASCII.decode(mBuf).toString();
                // NOTE(review): words straddling a chunk boundary are split between two
                // tasks — same edge effect as the single-threaded variant's buffer edge.
                return countByString(str);
            }
        }
    }

    /**
     * Multi-threaded variant: split the file into {@code chunkSize}-byte tasks,
     * submit them to {@link #forkJoinPool}, then merge the per-chunk maps.
     *
     * @param fileName  path of the file to count
     * @param chunkSize size in bytes of each mapped chunk
     * @throws ExecutionException   if a counting task failed
     * @throws InterruptedException if interrupted while waiting for a task
     */
    public void run(String fileName, long chunkSize) throws ExecutionException, InterruptedException {
        long length = new File(fileName).length();

        long position = 0;
        long startTime = System.currentTimeMillis();
        // One future per chunk, collected in submission order.
        ArrayList<Future<HashMap<String, Integer>>> tasks = new ArrayList<>();

        while (position < length) {
            // Clamp the last chunk to the end of the file.
            long next = Math.min(position + chunkSize, length);
            tasks.add(forkJoinPool.submit(new CountTask(fileName, position, next)));
            position = next;
        }
        System.out.format("split to %d tasks\n", tasks.size());

        HashMap<String, Integer> totalMap = new HashMap<>();

        for (Future<HashMap<String, Integer>> task : tasks) {
            // get() blocks until the chunk is counted; merge its counts into the total.
            for (Map.Entry<String, Integer> entry : task.get().entrySet()) {
                incKey(entry.getKey(), totalMap, entry.getValue());
            }
        }

        System.out.println("time:" + (System.currentTimeMillis() - startTime) + "ms");
        System.out.println("total:" + totalMap.size());

        System.out.println(totalMap.get("ababb"));
    }

    /** Entry point for the multi-threaded run: 10 MiB chunks. */
    @Test
    public void count() throws ExecutionException, InterruptedException {
        System.out.println("processors:" + Runtime.getRuntime().availableProcessors());
        // Reuse this instance (and its pool) instead of allocating a second WordCount.
        run(filePath, 1024 * 1024 * 10);
    }

}
