package utils;

import analyser.Analyser;
import dao.DaoFactory;
import dao.MessageDao;
import entity.Message;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.text.Collator;
import java.util.*;

//Maintains a HashMap that counts the IDF (inverse document frequency) denominator:
//for each word, the number of documents (messages) it appears in.
public class IDFParse {

    //word -> number of documents that contain the word (IDF denominator)
    private HashMap<String, Integer> countsMap = new HashMap<String, Integer>();
    //stop words loaded once at construction; HashSet gives O(1) membership tests
    //(empty, never null, if the resource is missing)
    private Set<String> stopWords = new HashSet<String>();

    Analyser analyser = new Analyser();

    public IDFParse() {
        readStopWords("stopwords.txt");
    }

    /**
     * Tokenizes the message content, removes stop words, and counts each
     * remaining distinct word as occurring in one more document.
     *
     * @param message message whose content is analysed
     */
    public void parseMessage(Message message) {
        String content = message.getContent();
        List<String> words = analyser.analyse(content);
        filterStopWords(words, stopWords);
        combineWords(words);
    }

    /**
     * Increments countsMap once for every distinct word in the argument,
     * regardless of how many times the word occurs in it.
     * Used to build the document-frequency denominator of IDF.
     *
     * @param strs words extracted from one document (duplicates allowed)
     */
    public void combineWords(List<String> strs) {
        //de-duplicate first: a word counts at most once per document
        Set<String> distinct = new HashSet<String>(strs);
        for (String word : distinct) {
            Integer count = countsMap.get(word);
            countsMap.put(word, count == null ? 1 : count + 1);
        }
    }

    /**
     * Loads stop words, one per line (UTF-8), from a classpath resource.
     * If the resource cannot be found, the stop-word set is left empty
     * instead of throwing a NullPointerException; the reader is always
     * closed (the original leaked it).
     *
     * @param path classpath-relative resource name, e.g. "stopwords.txt"
     */
    public void readStopWords(String path) {
        InputStream inStream = IDFParse.class.getClassLoader().getResourceAsStream(path);
        Set<String> result = new HashSet<String>();
        //getResourceAsStream returns null when the resource is absent
        if (inStream == null) {
            this.stopWords = result;
            return;
        }
        BufferedReader reader = null;
        try {
            reader = new BufferedReader(new InputStreamReader(inStream, "UTF-8"));
            String word;
            while ((word = reader.readLine()) != null) {
                result.add(word);
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            //always release the reader and the underlying stream
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        this.stopWords = result;
    }

    /**
     * Removes every stop word from {@code words}, in place.
     * Membership is tested against a HashSet in O(1); the previous
     * implementation re-sorted the stop-word list on every lookup and
     * mutated the caller's list as a side effect.
     *
     * @param words     word list to filter; modified in place
     * @param stopWords words to remove; never modified
     */
    public void filterStopWords(List<String> words, Collection<String> stopWords) {
        Set<String> stops = (stopWords instanceof Set)
                ? (Set<String>) stopWords
                : new HashSet<String>(stopWords);
        Iterator<String> it = words.iterator();
        while (it.hasNext()) {
            if (stops.contains(it.next())) {
                it.remove();
            }
        }
    }

    /**
     * @return the live word -> document-count map maintained by this parser
     */
    public HashMap<String, Integer> getCountsMap() {
        return countsMap;
    }

    public static void main(String[] args) {
        IDFParse idfParse = new IDFParse();
        MessageDao dao = DaoFactory.getMessageDaoInstance();
        Message message = dao.getMessagebyId(1);
        idfParse.parseMessage(message);
        HashMap<String, Integer> rst = idfParse.getCountsMap();
        System.out.println(rst);
    }
}
