package ltd.hxya.novel.book.service.impl;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;

import lombok.extern.slf4j.Slf4j;

import ltd.hxya.novel.book.mapper.WordAnalysisMapper;
import ltd.hxya.novel.book.service.IWordAnalysisService;
import ltd.hxya.novel.common.utils.CosineSimilarity;
import ltd.hxya.novel.entity.rowdata.WordAnalysis;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;

/**
 * <p>
 * 根据相关的词语分析小说分类 服务实现类
 * </p>
 *
 * @author hty
 * @since 2022-11-08
 */
@Service
@Slf4j
public class WordAnalysisServiceImpl extends ServiceImpl<WordAnalysisMapper, WordAnalysis> implements IWordAnalysisService {

    /** Cap on non-common keyword rows a category may grow past before pruning. */
    private static final int MAX_KEYWORD_COUNT = 1500;

    /** Special category label for keywords shared across most categories. */
    private static final String COMMON_CATEGORY = "通用";

    @Autowired
    private WordAnalysisMapper wordAnalysisMapper;

    /**
     * Trains the classifier with one piece of text for the given category.
     * Frequencies of keywords already stored for the category are accumulated,
     * previously unseen keywords are inserted, the common-word table is
     * rebuilt, and oversized keyword sets are pruned.
     *
     * @param category category label the training text belongs to
     * @param text     raw training text to analyse
     * @throws IOException propagated from {@link CosineSimilarity#analysisWord}
     */
    public void trainData(String category, String text) throws IOException {
        QueryWrapper<WordAnalysis> categoryQuery = new QueryWrapper<>();
        categoryQuery.eq("category", category);
        List<WordAnalysis> keywordList = this.getBaseMapper().selectList(categoryQuery);

        // Segment the text into weighted keywords for this category.
        List<WordAnalysis> wordAnalyses = CosineSimilarity.analysisWord(category, text);

        // Index the freshly analysed words by keyword for O(1) lookup.
        Map<String, WordAnalysis> wordAnalysisMap = new HashMap<>();
        for (WordAnalysis wordAnalysis : wordAnalyses) {
            wordAnalysisMap.put(wordAnalysis.getKeyword(), wordAnalysis);
        }
        // Merge function keeps the last occurrence, matching HashMap.put above
        // (plain toMap would throw on duplicate keywords).
        Map<String, String> keywordNutureMap = wordAnalyses.stream()
                .collect(Collectors.toMap(WordAnalysis::getKeyword, WordAnalysis::getNuture, (a, b) -> b));

        // Keywords already stored for this category: add the newly analysed
        // frequency onto the persisted row. BUG FIX: the previous code added
        // the analysed frequency to itself and left the database entities
        // untouched, so updateBatchById persisted unchanged rows.
        List<WordAnalysis> toUpdate = new ArrayList<>();
        for (WordAnalysis stored : keywordList) {
            WordAnalysis analysed = wordAnalysisMap.remove(stored.getKeyword());
            if (analysed != null) {
                stored.setFrequency(stored.getFrequency() + analysed.getFrequency());
                toUpdate.add(stored);
            }
        }
        if (!CollectionUtils.isEmpty(toUpdate)) {
            this.updateBatchById(toUpdate);
        }

        // Remaining analysed keywords are new to this category: insert them.
        List<WordAnalysis> addWords = new ArrayList<>();
        wordAnalysisMap.forEach((keyword, analysed) -> {
            WordAnalysis analysis = new WordAnalysis();
            analysis.setCategory(category);
            analysis.setFrequency(analysed.getFrequency());
            analysis.setKeyword(keyword);
            analysis.setNuture(keywordNutureMap.get(keyword));
            addWords.add(analysis);
        });
        if (!CollectionUtils.isEmpty(addWords)) {
            this.saveBatch(addWords);
        }
        int length = keywordList.size() + addWords.size();

        // Rebuild the common-word set, then prune the lowest-frequency
        // non-common keywords if the category grew past the cap.
        List<WordAnalysis> commonWordList = categoryRemoveDuplicate();
        List<String> commonKeyWord = commonWordList.stream()
                .map(WordAnalysis::getKeyword)
                .collect(Collectors.toList());
        if (length - commonWordList.size() > MAX_KEYWORD_COUNT) {
            // NOTE(review): this prune query is not scoped to `category`, so it
            // ranks and deletes rows across all categories — confirm intended.
            QueryWrapper<WordAnalysis> pruneQuery = new QueryWrapper<>();
            if (!CollectionUtils.isEmpty(commonKeyWord)) {
                pruneQuery.notIn("keyword", commonKeyWord);
            }
            // Skip the top half of the frequency ranking and select the tail.
            pruneQuery.orderByDesc("frequency")
                    .last("limit " + MAX_KEYWORD_COUNT / 2 + "," + MAX_KEYWORD_COUNT);
            List<WordAnalysis> analysisList = this.getBaseMapper().selectList(pruneQuery);
            List<Integer> idList = analysisList.stream()
                    .map(WordAnalysis::getId)
                    .collect(Collectors.toList());
            // Guard: deleteBatchIds with an empty list would build invalid SQL.
            if (!CollectionUtils.isEmpty(idList)) {
                this.getBaseMapper().deleteBatchIds(idList);
            }
        }
    }

    /**
     * Recomputes the set of "common" keywords — words occurring in more than
     * half of all categories — and replaces the rows stored under the special
     * category {@value #COMMON_CATEGORY} with them.
     *
     * @return the freshly persisted common-keyword rows (possibly empty)
     */
    public List<WordAnalysis> categoryRemoveDuplicate() {
        List<String> categoryList = wordAnalysisMapper.getCategory();

        // Count, per keyword, how many stored rows it appears in.
        Map<String, Integer> occurrenceCount = new HashMap<>();
        for (String category : categoryList) {
            QueryWrapper<WordAnalysis> query = new QueryWrapper<>();
            query.eq("category", category);
            for (WordAnalysis wordAnalysis : this.getBaseMapper().selectList(query)) {
                occurrenceCount.merge(wordAnalysis.getKeyword(), 1, Integer::sum);
            }
        }

        // A keyword is common when it occurs in more than half of the
        // categories — and never when only a single category exists.
        List<String> equalWord = new ArrayList<>();
        occurrenceCount.forEach((keyword, count) -> {
            if (count > categoryList.size() / 2 && categoryList.size() != 1) {
                equalWord.add(keyword);
            }
        });

        List<WordAnalysis> commonWordList = equalWord.stream().map(keyword -> {
            WordAnalysis wordAnalysis = new WordAnalysis();
            wordAnalysis.setCategory(COMMON_CATEGORY);
            wordAnalysis.setKeyword(keyword);
            wordAnalysis.setFrequency(1f);
            return wordAnalysis;
        }).collect(Collectors.toList());

        // Replace the previous common-word rows with the recomputed set.
        this.getBaseMapper().delete(new QueryWrapper<WordAnalysis>().eq("category", COMMON_CATEGORY));
        if (!CollectionUtils.isEmpty(commonWordList)) {
            this.saveBatch(commonWordList);
        }
        return commonWordList;
    }

    /**
     * Returns the category whose trained keyword profile is most similar to
     * the given text, or {@code null} when no categories exist. Ties are
     * resolved in favour of the later category in the mapper's order
     * (the previous HashMap-sentinel implementation resolved ties in
     * arbitrary iteration order).
     *
     * @param text text to classify
     * @throws IOException propagated from the similarity computation
     */
    public String calculateMaxSimilarCategory(String text) throws IOException {
        List<String> categoryList = wordAnalysisMapper.getCategory();
        String bestCategory = null;
        double bestSimilarity = 0.00;
        for (String category : categoryList) {
            double similarity = calcularteSimilarCategory(text, category);
            if (similarity >= bestSimilarity) {
                bestSimilarity = similarity;
                bestCategory = category;
            }
        }
        return bestCategory;
    }

    /**
     * Computes the cosine similarity between the text and one category's
     * keyword profile. Keywords belonging to the common set
     * ({@value #COMMON_CATEGORY}) are excluded since they carry no
     * discriminating power between categories.
     *
     * @param text     text to compare
     * @param category category whose keyword profile to compare against
     * @return similarity score as produced by {@link CosineSimilarity}
     * @throws IOException propagated from the similarity computation
     */
    public double calcularteSimilarCategory(String text, String category) throws IOException {
        QueryWrapper<WordAnalysis> categoryQuery = new QueryWrapper<>();
        categoryQuery.eq("category", category);
        List<WordAnalysis> rowWordAnalyses = this.getBaseMapper().selectList(categoryQuery);

        List<WordAnalysis> commonWordList = this.getBaseMapper()
                .selectList(new QueryWrapper<WordAnalysis>().eq("category", COMMON_CATEGORY));
        // Set lookup instead of List.contains: the filter below runs once per
        // keyword row, so linear scans would be O(n*m).
        Set<String> commonKeyWord = commonWordList.stream()
                .map(WordAnalysis::getKeyword)
                .collect(Collectors.toSet());

        List<WordAnalysis> wordAnalyses = rowWordAnalyses.stream()
                .filter(wordAnalysis -> !commonKeyWord.contains(wordAnalysis.getKeyword()))
                .collect(Collectors.toList());

        double similarity = CosineSimilarity.getSimilarityImpl(wordAnalyses, text);
        log.info("相似度为：{}", similarity);
        return similarity;
    }

    /**
     * Returns all trained keyword rows stored for the given category.
     *
     * @param category category label to look up
     * @return matching rows (empty list when none exist)
     */
    public List<WordAnalysis> getListByCategory(String category) {
        QueryWrapper<WordAnalysis> query = new QueryWrapper<>();
        query.eq("category", category);
        return baseMapper.selectList(query);
    }
}
