package com.simtop.hj.utils;
import com.kennycason.kumo.WordFrequency;
import com.kennycason.kumo.nlp.FrequencyAnalyzer;
import com.kennycason.kumo.nlp.tokenizers.ChineseWordTokenizer;
import com.simtop.hj.pojo.WordFrequencyData;
import com.simtop.hj.pojo.WordFrequencyPic;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

/**
 * Utility for extracting word frequencies from Chinese text using the Kumo
 * {@link FrequencyAnalyzer} with a {@link ChineseWordTokenizer}.
 *
 * <p>Both entry points analyze up to 600 tokens of length 2–6 and drop tokens
 * that are purely numeric (optionally signed), e.g. "2023" or "-15".
 */
public class WordFrequencyAnalyzer {

    /**
     * Matches purely numeric tokens with an optional leading sign.
     * Compiled once and reused — recompiling a Pattern on every call is wasted work.
     */
    private static final Pattern NUMERIC_PATTERN = Pattern.compile("^[-\\+]?[\\d]*$");

    /** Utility class — prevent instantiation. */
    private WordFrequencyAnalyzer() {
    }

    /**
     * Runs the Kumo analyzer over the given text lines with the shared
     * configuration (top 600 entries, word length 2–6, Chinese tokenizer).
     * Extracted because both public methods previously duplicated this setup.
     *
     * @param str input text lines to tokenize and count
     * @return word/frequency pairs ordered by the analyzer (highest frequency first)
     */
    private static List<WordFrequency> analyze(List<String> str) {
        FrequencyAnalyzer frequencyAnalyzer = new FrequencyAnalyzer();
        frequencyAnalyzer.setWordFrequenciesToReturn(600);
        frequencyAnalyzer.setMinWordLength(2);
        frequencyAnalyzer.setMaxWordLength(6);
        frequencyAnalyzer.setWordTokenizer(new ChineseWordTokenizer());
        return frequencyAnalyzer.load(str);
    }

    /**
     * Returns the top {@code wordSize} non-numeric words and the sum of their
     * frequencies, packaged for word-cloud rendering.
     *
     * @param wordSize maximum number of words to keep
     * @param str      input text lines to analyze
     * @return a {@link WordFrequencyPic} holding the selected words and their total frequency
     */
    public static WordFrequencyPic getWordFrequencyAnalyzer(Integer wordSize, List<String> str) {
        WordFrequencyPic wordFrequencyPic = new WordFrequencyPic();
        List<WordFrequency> wordFrequencies = new ArrayList<>();
        int count = 0;
        for (WordFrequency wordFrequency : analyze(str)) {
            // Stop once enough words are collected instead of scanning the full result list.
            if (wordFrequencies.size() >= wordSize) {
                break;
            }
            if (!NUMERIC_PATTERN.matcher(wordFrequency.getWord()).matches()) {
                wordFrequencies.add(wordFrequency);
                count += wordFrequency.getFrequency();
            }
        }
        wordFrequencyPic.setWordFrequencyList(wordFrequencies);
        wordFrequencyPic.setCount(count);
        return wordFrequencyPic;
    }

    /**
     * Returns every non-numeric word found in the input (up to the analyzer's
     * 600-entry cap) as plain {@link WordFrequencyData} records.
     *
     * <p>Removed from the original: an unused {@code WordFrequencyPic} local and
     * a {@code count} accumulator whose value was never read.
     *
     * @param str input text lines to analyze
     * @return all retained words with their frequencies
     */
    public static List<WordFrequencyData> getWordFrequencyAnalyzerAll(List<String> str) {
        List<WordFrequencyData> wordFrequencyDataList = new ArrayList<>();
        for (WordFrequency wordFrequency : analyze(str)) {
            if (!NUMERIC_PATTERN.matcher(wordFrequency.getWord()).matches()) {
                WordFrequencyData wordFrequencyData = new WordFrequencyData();
                wordFrequencyData.setWord(wordFrequency.getWord());
                wordFrequencyData.setFrequency(wordFrequency.getFrequency());
                wordFrequencyDataList.add(wordFrequencyData);
            }
        }
        return wordFrequencyDataList;
    }
}
