package org.example.docsearcher.config;

import org.ansj.domain.Term;
import org.ansj.splitWord.analysis.ToAnalysis;
import org.example.docsearcher.model.DocInfo;
import org.example.docsearcher.model.Relate;
import org.example.docsearcher.model.Result;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;

import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

@Configuration
public class DocSearcher {
    public Index index = new Index();

    private static final String STOP_WORD_PATH = "stop.txt";

    // Number of characters taken on each side of the keyword when building a snippet.
    private static final int DESC_RADIUS = 150;

    private Set<String> stopWords = new HashSet<>();

    public DocSearcher() {
        // Load the forward/inverted index from disk.
        index.load();

        // Load the stop-word list used to filter query terms.
        loadStopWord();
    }

    /**
     * Runs a full-text search for the given query string.
     *
     * @param query raw user input; it is segmented into terms by ansj
     * @return results sorted by relevance (merged term weight), descending
     */
    public List<Result> search(String query) {
        // 1. Segment the user input into terms (ansj lower-cases them).
        List<Term> terms = ToAnalysis.parse(query).getTerms();

        // 2. Look up each non-stop-word term in the inverted index, merging hits
        //    for the same document by summing their weights (dedup via hash map).
        Map<Integer, Relate> allDocsHash = new HashMap<>();
        for (Term term : terms) {
            String key = term.getName();
            // Skip stop words and bare spaces.
            if (stopWords.contains(key) || key.equals(" ")) {
                continue;
            }
            List<Relate> docs = index.getInverted(key);
            if (docs == null) {
                // No document contains this term.
                continue;
            }
            for (Relate relate : docs) {
                int docId = relate.getDocId();
                Relate existing = allDocsHash.get(docId);
                if (existing == null) {
                    allDocsHash.put(docId, relate);
                } else {
                    // Document already matched by another term: sum the weights.
                    int oldWeight = existing.getWeight();
                    int addWeight = relate.getWeight();
                    existing.setWeight(oldWeight + addWeight);
                    // Keep as "key" the term whose individual weight is higher,
                    // so the snippet is generated around the strongest match.
                    if (oldWeight < addWeight) {
                        existing.setKey(key);
                    }
                }
            }
        }

        // 3. Sort the merged hits by relevance, descending.
        List<Relate> allDocs = new ArrayList<>(allDocsHash.values());
        // Integer.compare avoids the overflow risk of subtracting weights.
        allDocs.sort((o1, o2) -> Integer.compare(o2.getWeight(), o1.getWeight()));

        // 4. Resolve document info through the forward index and build the response.
        List<Result> results = new ArrayList<>(allDocs.size());
        for (Relate relate : allDocs) {
            DocInfo docInfo = index.getForward(relate.getDocId());
            if (docInfo == null) {
                // Forward index out of sync with the inverted index; skip rather than NPE.
                continue;
            }
            Result result = new Result();
            result.setTitle(docInfo.getTitle());
            result.setUrl(docInfo.getUrl());
            result.setDesc(genDesc(terms, relate.getKey(), docInfo.getContent()));
            results.add(result);
        }
        return results;
    }

    /**
     * Loads the stop-word file (one word per line) into {@link #stopWords}.
     *
     * @throws RuntimeException wrapping the IOException if the file cannot be read
     */
    private void loadStopWord() {
        // Read the file as UTF-8 explicitly; a bare FileReader would use the
        // platform default charset and garble non-ASCII stop words.
        try (BufferedReader bufferedReader =
                     Files.newBufferedReader(Paths.get(STOP_WORD_PATH), StandardCharsets.UTF_8)) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                stopWords.add(line);
            }
        } catch (IOException e) {
            throw new RuntimeException("failed to load stop words from " + STOP_WORD_PATH, e);
        }
    }

    /**
     * Builds an HTML snippet of the document body centered on the first
     * whole-word occurrence of {@code key}, with every query term wrapped
     * in {@code <i>} tags.
     *
     * @param terms   segmented query terms (lower-cased by ansj)
     * @param key     the highest-weight matched term for this document
     * @param content the document body text
     * @return snippet of up to 2 * DESC_RADIUS characters followed by "..."
     */
    private String genDesc(List<Term> terms, String key, String content) {
        // Lower-case the content so it matches ansj's lower-cased terms, then pad
        // whole-word occurrences of the key with spaces so it can be located.
        // Pattern.quote stops regex metacharacters in the key (e.g. "c++", ".net")
        // from being interpreted as pattern syntax.
        content = content.toLowerCase()
                .replaceAll("\\b" + Pattern.quote(key) + "\\b", " " + key + " ");
        int firstPos = content.indexOf(" " + key + " ");
        if (firstPos < 0) {
            // Key not found in the body (it may only occur in the title):
            // fall back to a snippet taken from the start of the content.
            firstPos = 0;
        }

        // Take DESC_RADIUS characters on each side, clamped to the content bounds.
        int begPos = Math.max(firstPos - DESC_RADIUS, 0);
        int endPos = Math.min(firstPos + DESC_RADIUS, content.length());
        String desc = content.substring(begPos, endPos) + "...";

        // Highlight every query term; (?i) makes the match case-insensitive.
        // quoteReplacement guards against '$' / '\' in the replacement text.
        for (Term term : terms) {
            String word = term.getName();
            desc = desc.replaceAll("(?i)" + Pattern.quote(" " + word + " "),
                    Matcher.quoteReplacement(" <i>" + word + "</i> "));
        }
        return desc;
    }
}
