package top.swimmer.tokenizer.core;

import top.swimmer.tokenizer.dictionary.AbstractDictionary;
import top.swimmer.tokenizer.dictionary.DictionaryFactory;
import top.swimmer.tokenizer.entities.Word;

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.stream.Collectors;

import static top.swimmer.tools.number.NumberUtils.min;

/**
 * Forward Maximum Matching (FMM) word segmentation.
 *
 * <p>Scans the text left to right. At each position it tries the longest
 * possible candidate first (bounded by the dictionary's longest entry) and
 * shrinks the window one character at a time until a dictionary word is
 * found. If no candidate matches, the single character at the current
 * position becomes a word on its own.
 *
 * <p>Created by xiaoyou on 17-1-6.
 */
public class ForwardMaximumMatching extends AbstractSegmentation {

    /** Dictionary used for word lookups; assigned once at construction. */
    private final AbstractDictionary dictionary;

    /**
     * @param dictionary the word dictionary to match against; must not be null
     */
    public ForwardMaximumMatching(AbstractDictionary dictionary) {
        this.dictionary = dictionary;
    }

    @Override
    public AbstractDictionary getDictionary() {
        return dictionary;
    }

    /**
     * Segments {@code text} into words using forward maximum matching.
     *
     * @param text the sentence to segment; must not be null
     *             (an empty string yields an empty list)
     * @return the recognized words in their original left-to-right order
     */
    @Override
    List<Word> segmentation(String text) {
        Queue<Word> results = new LinkedList<>();
        int textLength = text.length();
        // Upper bound for any candidate word: the shorter of the dictionary's
        // longest entry and the whole text.
        int wordMaxLength = min(getDictionary().getMaxWordLength(), textLength);
        int start = 0; // current segmentation position

        while (start < textLength) {
            int currentLength = min(textLength - start, wordMaxLength); // longest candidate at this position
            boolean isSeg = false;
            // Try the longest candidate first, shrinking by one character per miss.
            while (start + currentLength <= textLength) {
                if (getDictionary().contains(text, start, currentLength)) {
                    addWord(results, text, start, currentLength); // matched — record the word
                    isSeg = true;
                    break;
                } else if (--currentLength <= 0) { // nothing left to try at this position
                    break;
                }
            }

            if (isSeg) {
                start += currentLength;
            } else {
                addWord(results, text, start++, 1); // no dictionary match — emit a single character
            }
        }

        return new ArrayList<>(results);
    }

    /**
     * Ad-hoc demo entry point; prints segmentation results for sample inputs.
     */
    public static void main(String[] args) throws InterruptedException {
        // Renamed from "minimumMatching": this class implements forward MAXIMUM matching.
        ForwardMaximumMatching matcher = new ForwardMaximumMatching(DictionaryFactory.getDoubleArrayTrieDictionary());
        System.out.println(matcher.segmentation("黑羽"));
        System.out.println(matcher.segmentation("黑羽千影"));
//        System.out.println(matcher.segmentation("贵是贵是贵是贵是贵是贵是贵是贵是"));
//        System.out.println(matcher.segmentation("南京市长江大桥"));
//        System.out.println(matcher.segmentation("贵是哈南京市长江大桥哈贵是"));
//        System.out.println(matcher.segmentation("我们都有一个家"));
//        System.out.println(matcher.segmentation("阿基受到法律看还是来开发"));
//        System.out.println(matcher.segmentation("长春市长王铁路"));
//        System.out.println(matcher.segmentation("自如有家是一家创造品质租住产品的互联网公司"));
//        System.out.println(matcher.segmentation("无论是后来故事怎么了"));
//        System.out.println(matcher.segmentation("也要让后来人生精彩着"));
//        System.out.println(matcher.segmentation("无论是后来故事怎么了，也要让后来人生精彩着！"));
//        System.out.println(matcher.segmentation("也要！@#让后\"来人%……&生精彩着"));
//        System.out.println(matcher.segmentation("他不管三七二十一就骂她是二百五，我就无语了，真是个二货。他还问我：“杨老师，‘二货’是什么意思？”"));

        // Keeps the JVM alive (~2.7h) so the process can be inspected with
        // profiling tools after the demo runs — presumably a debugging aid.
        Thread.sleep(10000000L);
    }
}
