/**
 * TextRank-based keyword extraction.
 *
 * @author hzs
 * @date Created in 2021/1/28 22:08
 * @version 1.0
 */

import org.apache.commons.lang3.StringUtils;
import util.SplitTextInSentence;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * TextRank关键词提取
 * @author hankcs
 */
public class TextRankKeyword {
    private static Pattern pattern = Pattern.compile("\\[\\d+([,–-]\\d+)*\\]");

    public static final int nKeyword = 10;
    /**
     * 阻尼系数（ＤａｍｐｉｎｇＦａｃｔｏｒ），一般取值为0.85
     */
    static final float d = 0.85f;
    /**
     * 最大迭代次数
     */
    static final int max_iter = 200;
    static final float min_diff = 0.001f;

    private static Set<String> stopWordSet = new HashSet<>();

    public TextRankKeyword() {
        System.setProperty("java.util.Arrays.useLegacyMergeSort", "true");
    }

    public static Map<String, Float> getKeyword(String title, String content) {
        List<String> sentences = new ArrayList<>();
        sentences.add(title);
        sentences.addAll(SplitTextInSentence.testChunkSentences(content));
        List<String> wordList = new ArrayList<>();
        for (String sentence : sentences) {
            sentence = sentence.replace("\n", " ");
            sentence = cutTail(sentence);
            String[] split = sentence.split(" ");
            for (String word : split) {
                word = convertWord(word);
                if (shouldInclude(word)) {
                    wordList.add(word);
                }
            }
        }

        Map<String, Set<String>> words = new HashMap<>();
        Queue<String> que = new LinkedList<>();
        for (String w : wordList) {
            if (!words.containsKey(w)) {
                words.put(w, new HashSet<>());
            }
            que.offer(w);
            if (que.size() > 5) {
                que.poll();
            }
            for (String w1 : que) {
                for (String w2 : que) {
                    if (w1.equals(w2)) {
                        continue;
                    }
                    words.get(w1).add(w2);
                    words.get(w2).add(w1);
                }
            }
        }
        Map<String, Float> score = new HashMap<>();
        for (int i = 0; i < max_iter; ++i) {
            Map<String, Float> m = new HashMap<>();
            float max_diff = 0;
            for (Map.Entry<String, Set<String>> entry : words.entrySet()) {
                String key = entry.getKey();
                Set<String> value = entry.getValue();
                m.put(key, 1 - d);
                for (String other : value) {
                    int size = words.get(other).size();
                    if (key.equals(other) || size == 0) {
                        continue;
                    }
                    m.put(key, m.get(key) + d / size * (score.get(other) == null ? 0 : score.get(other)));
                }
                max_diff = Math.max(max_diff, Math.abs(m.get(key) - (score.get(key) == null ? 0 : score.get(key))));
            }
            score = m;
            if (max_diff <= min_diff) {
                break;
            }
        }
        return score;
    }

    public static void main(String[] args)
    {
        String content = "Argumentation models have provided a fruitful source of ideas and technologies within both theoretical studies and\n" +
                "applications of AI. A recent overview of these contributions may be found in the survey of Bench-Capon and Dunne [1,2].\n" +
                "Two important models which have received considerable attention over the last ten years are the abstract argumentation\n" +
                "frameworks (afs) of Dung [12] and the related assumption based frameworks (abfs) of Bondarenko et al. [4]. Both approaches\n" +
                "provide interpretations for intuitive notions of “collection of justified arguments” as subsets satisfying particular\n" +
                "criteria with respect to the underlying framework. In Dung’s model the concept of “argument” is regarded as an atomic\n" +
                "entity whose principal feature of interest concerns those other arguments with which it is incompatible (such incompatibility\n" +
                "being described by the so-called attack relation). The formalism adopted in Bondarenko et al. develops a rationale\n" +
                "capturing incompatibility by treating an argument’s structure in terms of an assertion which is the outcome of a formal\n" +
                "derivation process within some logical theory. In this way two arguments are incompatible if the assertion supported by\n" +
                "one is inconsistent with the premises from which the other is [8–10] derived. ";
//        System.out.println(new TextRankKeyword().getKeyword("", content));
    }

    /**
     * 还原单词
     * @param word
     * @return
     */
    private static String convertWord(String word) {
        // 如果是引用索引号
        Matcher matcher = pattern.matcher(word);
        if (matcher.matches()) {
            return "";
        }
        word = word.toLowerCase().replaceAll("[^a-z0-9\\-’']" , "");
        return word;
    }

    /**
     * 去除句子末尾标点符号
     * @param sentence
     * @return
     */
    private static String cutTail(String sentence) {
        String[] chars = {".", "?", "!", ";"};
        for (String s : chars) {
            if (sentence.endsWith(s)) {
                sentence = sentence.substring(0, sentence.length()-1);
                break;
            }
            if (sentence.endsWith(s + " ")) {
                sentence = sentence.substring(0, sentence.length()-2);
                break;
            }
        }
        return sentence;
    }

    private static boolean shouldInclude(String word) {
        if (word.length() < 2) {
            return false;
        }

        if (StringUtils.isNumeric(word)) {
            return false;
        }

        if (stopWordSet.isEmpty()) {
            String path = TextRankKeyword.class.getClassLoader().getResource("stopwordsEN.txt").getPath();
            try (BufferedReader br = new BufferedReader(new InputStreamReader(
                    new FileInputStream(path)))) {
                for (String line = br.readLine(); line != null; line = br.readLine()) {
                    stopWordSet.add(line);
                }
            } catch (Exception e) {
                e.printStackTrace();
                return false;
            }
        }
        if (stopWordSet.contains(word)) {
            return false;
        }

        return true;
    }
}
