package com.wt.admin.service.language.impl;

import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSONUtil;
import com.aizuda.easy.security.code.BasicCode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.wt.admin.config.ConstVar;
import com.wt.admin.config.prop.IndexProp;
import com.wt.admin.config.socket.WebSocketSessionManager;
import com.wt.admin.domain.dto.language.ClassificationTestTrainingDTO;
import com.wt.admin.domain.dto.language.KeyWordForSentenceDTO;
import com.wt.admin.domain.dto.language.QATestTrainingDTO;
import com.wt.admin.domain.dto.language.SentenceConfigDTO;
import com.wt.admin.domain.entity.language.KeywordsEntity;
import com.wt.admin.domain.entity.language.QAEntity;
import com.wt.admin.domain.entity.model.ModelListEntity;
import com.wt.admin.domain.model.LanguageModel;
import com.wt.admin.domain.model.QAModel;
import com.wt.admin.domain.vo.language.KeyParameterModelMapperVO;
import com.wt.admin.domain.vo.language.KeyWordModelMapperVO;
import com.wt.admin.domain.vo.language.ParseSentenceVO;
import com.wt.admin.domain.vo.language.SentenceVO;
import com.wt.admin.domain.vo.socket.ProgressVO;
import com.wt.admin.domain.vo.socket.SocketVO;
import com.wt.admin.util.AssertUtil;
import jakarta.annotation.Resource;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.dromara.easyai.config.SentenceConfig;
import org.dromara.easyai.entity.KeyWordForSentence;
import org.dromara.easyai.entity.SentenceModel;
import org.dromara.easyai.entity.TalkBody;
import org.dromara.easyai.naturalLanguage.TalkToTalk;
import org.dromara.easyai.naturalLanguage.languageCreator.CatchKeyWord;
import org.dromara.easyai.naturalLanguage.languageCreator.KeyWordModel;
import org.dromara.easyai.naturalLanguage.word.MyKeyWord;
import org.dromara.easyai.naturalLanguage.word.WordEmbedding;
import org.dromara.easyai.rnnJumpNerveCenter.RRNerveManager;
import org.springframework.stereotype.Service;

import java.io.File;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;


@Service
@Slf4j
public class LanguageTrainingService {

    @Resource
    private IndexProp indexProp;
    // Jackson mapper is thread-safe after configuration; reused for QA model (de)serialization.
    private final ObjectMapper mapper = new ObjectMapper();
    // Compiled once: extracts alternating "@我: question @AI: answer" pairs from a
    // QA transcript. DOTALL lets questions/answers span multiple lines.
    private static final Pattern QA_PATTERN =
            Pattern.compile("@我:\\s*(.*?)(?=@AI:|$)@AI:\\s*(.*?)(?=@我:|$)", Pattern.DOTALL);
    public static final String CLASS = "classification";
    public static final String QA = "qa";

    /**
     * Trains the sentence classification network, the word embedding and the
     * per-keyword sniffer/extraction models, then persists the combined model
     * as pretty-printed JSON when self-checking passes AND the caller-supplied
     * registration callback returns a non-empty entity.
     *
     * @param list   training sentences with their classification id and keyword taggings
     * @param config training hyper-parameters; {@code typeNub} is derived from the data here
     * @param remark formatted into the model file path
     * @param fun    registration callback; a non-empty result authorizes the file write
     */
    @SneakyThrows
    public void classTraining(List<SentenceVO> list, SentenceConfigDTO config, String remark, Function<Boolean, ModelListEntity> fun) {
        AssertUtil.objIsNull(config, BasicCode.BASIC_CODE_99999);
        AssertUtil.List.isEmpty(list, BasicCode.BASIC_CODE_99999);
        Collections.shuffle(list);
        // All sentences in order, plus the same sentences grouped by classification id.
        List<String> sentence = new ArrayList<>(list.size());
        Map<Integer, List<String>> typeIdBySentences = new HashMap<>();
        for (SentenceVO sentenceVO : list) {
            sentence.add(sentenceVO.getSentence());
            typeIdBySentences
                    .computeIfAbsent(sentenceVO.getClassificationId(), k -> new ArrayList<>(list.size()))
                    .add(sentenceVO.getSentence());
        }
        // classificationId -> (keywordId -> tagged sentences); shared logic with parseSentence.
        Map<Integer, Map<Integer, List<KeyWordForSentenceDTO>>> sensorKeyWordMapper = buildKeyWordMapper(list);
        WordEmbedding wordEmbedding = new WordEmbedding();
        RRNerveManager rrNerveManager = new RRNerveManager(wordEmbedding);
        LanguageModel models = new LanguageModel();
        config.setTypeNub(typeIdBySentences.size());
        models.setConfig(config);
        send(30);
        wordEmbedding.setConfig(config);
        log.debug("训练配置信息：{}", JSONUtil.toJsonStr(config));
        rrNerveManager.init(config);
        wordEmbedding(sentence, config, wordEmbedding, models);
        log.debug("随机神经网络学习 每个分类样本不够300条，则重复数据到300条,20 * 300 = 6000");
        models.setRandomModel(rrNerveManager.studyType(typeIdBySentences));
        keyWordMapperMap(config, wordEmbedding, models, sensorKeyWordMapper);
        if (selfChecking(list, rrNerveManager)) {
            ModelListEntity entity = fun.apply(true);
            // Persist only when the registration callback produced an entity.
            if (ObjectUtil.isNotEmpty(entity)) {
                FileUtil.writeUtf8String(JSONUtil.toJsonPrettyStr(models),
                        String.format(indexProp.getModelPath().getLangeModel(), remark));
            }
        }
    }

    /**
     * Groups every sentence's tagged keywords first by classification id, then
     * by keyword id. Extracted because classTraining and parseSentence built
     * this structure with identical duplicated loops.
     */
    private Map<Integer, Map<Integer, List<KeyWordForSentenceDTO>>> buildKeyWordMapper(List<SentenceVO> list) {
        Map<Integer, Map<Integer, List<KeyWordForSentenceDTO>>> sensorKeyWordMapper = new ConcurrentHashMap<>();
        for (SentenceVO sentenceVO : list) {
            Map<Integer, List<KeyWordForSentenceDTO>> cache = sensorKeyWordMapper
                    .computeIfAbsent(sentenceVO.getClassificationId(), k -> new HashMap<>());
            sentenceVO.getTaggings().forEach(tagging -> {
                KeywordsEntity keywordsEntity = BeanUtil.toBean(tagging, KeywordsEntity.class);
                KeyWordForSentenceDTO keyWordForSentence = new KeyWordForSentenceDTO();
                keyWordForSentence.setSentence(sentenceVO.getSentence());
                // Blank taggings are normalized to "" so downstream models never see null.
                keyWordForSentence.setKeyWord(StrUtil.isBlank(keywordsEntity.getTagging()) ? "" : keywordsEntity.getTagging());
                keyWordForSentence.setId(keywordsEntity.getId());
                keyWordForSentence.setReply(keywordsEntity.getReplies());
                cache.computeIfAbsent(keywordsEntity.getId(), j -> new ArrayList<>()).add(keyWordForSentence);
            });
        }
        return sensorKeyWordMapper;
    }

    /**
     * Re-classifies every training sentence with the freshly trained network
     * and reports the hit rate.
     *
     * @return true when accuracy exceeds the (deliberately low) 0.1 threshold
     */
    private boolean selfChecking(List<SentenceVO> list, RRNerveManager rrNerveManager) throws Exception {
        int hits = 0;
        for (int i = 0; i < list.size(); i++) {
            SentenceVO sentence = list.get(i);
            // i doubles as the per-call event id expected by getType.
            if (rrNerveManager.getType(sentence.getSentence(), i) == sentence.getClassificationId()) {
                hits++;
            }
        }
        double point = hits / (double) list.size();
        log.info("准确率:{},检测数量:{}", point, hits);
        return point > 0.1;
    }

    /**
     * Classifies an input sentence with a persisted model and, when the
     * predicted classification has keyword models, extracts keywords plus a
     * fallback reply per keyword category.
     *
     * @param list training sentences; used to rebuild the classification->keyword lookup
     * @param data input sentence and the model name to load
     * @param tag  currently unused; kept for caller compatibility
     * @return null when the model file does not exist, otherwise the parse result
     */
    public ParseSentenceVO parseSentence(List<SentenceVO> list, ClassificationTestTrainingDTO data, String tag) throws Exception {
        File file = new File(String.format(indexProp.getModelPath().getLangeModel(), data.getModel()));
        if (!file.exists()) {
            return null;
        }
        Map<Integer, Map<Integer, List<KeyWordForSentenceDTO>>> sensorKeyWordMapper = buildKeyWordMapper(list);

        WordEmbedding wordEmbedding = new WordEmbedding();
        RRNerveManager rrNerveManager = new RRNerveManager(wordEmbedding);
        LanguageModel model = JSONUtil.toBean(FileUtil.readUtf8String(file), LanguageModel.class);
        SentenceConfig config = model.getConfig();
        wordEmbedding.setConfig(config);
        rrNerveManager.init(config);
        wordEmbedding.insertModel(model.getWordTwoVectorModel(), config.getWordVectorDimension());
        rrNerveManager.insertModel(model.getRandomModel());

        // classification id -> keyword sensitivity sniffer
        Map<Integer, MyKeyWord> myKeyWordMap = new HashMap<>();
        keyWordMapperMapDeserialize(config, wordEmbedding, model, myKeyWordMap);

        // keyword id -> keyword extraction model
        Map<Integer, CatchKeyWord> catchKeyWordMap = new HashMap<>();
        keyWordDeserialize(model, catchKeyWordMap);

        // Predict the classification id of the input sentence.
        int type = rrNerveManager.getType(data.getData(), System.currentTimeMillis());
        MyKeyWord myKeyWord = myKeyWordMap.get(type);
        if (ObjectUtil.isEmpty(myKeyWord)) {
            return new ParseSentenceVO(type);
        }
        List<ParseSentenceVO.Keywords> keyWordList = new ArrayList<>();
        Map<Integer, List<KeyWordForSentenceDTO>> byKeyword = sensorKeyWordMapper.get(type);
        if (byKeyword == null) {
            // The predicted classification carries no keyword taggings in the supplied data;
            // the original code would have thrown an NPE here.
            return new ParseSentenceVO(type, keyWordList);
        }
        byKeyword.forEach((keywordId, dtoList) -> {
            CatchKeyWord catchKeyWord = catchKeyWordMap.get(keywordId);
            if (ObjectUtil.isEmpty(catchKeyWord)) {
                return;
            }
            Set<String> keyWordSet = catchKeyWord.getKeyWord(data.getData());
            // Fall back to the canned reply when nothing (or only blanks) was extracted.
            String reply = null;
            if (keyWordSet.isEmpty() || keyWordSet.stream().anyMatch(StrUtil::isBlank)) {
                reply = dtoList.get(0).getReply();
            }
            keyWordList.add(new ParseSentenceVO.Keywords(keywordId, keyWordSet, reply));
        });
        return new ParseSentenceVO(type, keyWordList);
    }

    /**
     * Trains the question-answer (transformer) model from QA transcripts and
     * persists it as JSON when registration succeeds.
     *
     * @param list   QA entities whose content holds "@我:/@AI:" dialogue pairs
     * @param config training configuration; its tf section drives TalkToTalk
     * @param remark formatted into the QA model file path
     * @param fun    registration callback; a non-empty result authorizes the file write
     */
    @SneakyThrows
    public void QATraining(List<QAEntity> list, SentenceConfigDTO config, String remark, Function<Boolean, ModelListEntity> fun) {
        if (ObjectUtil.isEmpty(config) || CollUtil.isEmpty(list)) {
            return;
        }
        // NOTE(review): the original also built a SentenceModel here that was never
        // consumed by anything; the dead local has been removed.
        List<TalkBody> sentences = new ArrayList<>();
        for (QAEntity qa : list) {
            Matcher matcher = QA_PATTERN.matcher(qa.getContent());
            while (matcher.find()) {
                TalkBody talkBody = new TalkBody();
                talkBody.setQuestion(matcher.group(1).trim());
                talkBody.setAnswer(matcher.group(2).trim());
                sentences.add(talkBody);
            }
        }
        send(50);
        Collections.shuffle(sentences);
        config.setTypeNub(0);
        log.debug("qa 训练配置 {}", JSONUtil.toJsonStr(config));

        TalkToTalk talkToTalk = new TalkToTalk(config.getTf());
        QAModel model = new QAModel(null, talkToTalk.study(sentences, null));
        model.setConfig(config.getTf());
        String jsonModel = mapper.writeValueAsString(model);
        ModelListEntity entity = fun.apply(true);
        // Consistent with classTraining: persist only when registration produced an entity
        // (the original ignored the callback result and always wrote the file).
        if (ObjectUtil.isNotEmpty(entity)) {
            FileUtil.writeUtf8String(jsonModel, String.format(indexProp.getModelPath().getQaModel(), remark));
        }
        send(95);
    }

    /**
     * Loads a persisted QA model and asks it to answer the input sentence.
     *
     * @return the model's answer, or null when the model file does not exist
     */
    @SneakyThrows
    public String qaParseSentence(QATestTrainingDTO data) {
        File file = new File(String.format(indexProp.getModelPath().getQaModel(), data.getModel()));
        if (!file.exists()) {
            return null;
        }
        QAModel model = mapper.readValue(FileUtil.readUtf8String(file), QAModel.class);
        TalkToTalk talkToTalk = new TalkToTalk(model.getConfig());
        talkToTalk.insertModel(model.getTransFormerModel());
        String answer = talkToTalk.getAnswer(data.getData(), System.currentTimeMillis());
        log.debug("question={} answer={}", data.getData(), answer);
        return answer;
    }

    /**
     * Word-vector training: feeds all sentences to the embedding, starts
     * learning, and stores the resulting model.
     *
     * @param sentence all training sentences
     */
    private void wordEmbedding(List<String> sentence, SentenceConfig sentenceConfig, WordEmbedding wordEmbedding, LanguageModel models) throws Exception {
        log.debug("词向量学习 {}", sentence.size());
        SentenceModel sentenceModel = new SentenceModel();
        sentence.forEach(sentenceModel::setSentence);
        // Supply the corpus and the word-vector dimensionality, then train.
        wordEmbedding.init(sentenceModel, sentenceConfig.getWordVectorDimension());
        models.setWordTwoVectorModel(wordEmbedding.start());
        send(50);
    }

    /**
     * Trains, for every (classification, keyword-category) pair, a keyword
     * sensitivity sniffer (MyKeyWord) and a keyword extraction model
     * (CatchKeyWord), storing both model lists on the language model.
     * One CatchKeyWord captures exactly one keyword category and pairs
     * one-to-one with a MyKeyWord sniffer.
     */
    private void keyWordMapperMap(SentenceConfig sentenceConfig, WordEmbedding wordEmbedding, LanguageModel models, Map<Integer, Map<Integer, List<KeyWordForSentenceDTO>>> sensorKeyWordMapper) {
        // Sniffer models: parseSentence resolves these by CLASSIFICATION id (myKeyWordMap.get(type)).
        List<KeyParameterModelMapperVO> keyParameterModelMapperVOS = new ArrayList<>();
        // Extraction models: parseSentence resolves these by KEYWORD id (catchKeyWordMap.get(keywordId)).
        List<KeyWordModelMapperVO> keyWordModelMapperVOS = new ArrayList<>();
        sensorKeyWordMapper.forEach((classificationId, byKeyword) ->
                byKeyword.forEach((keywordId, dtoList) -> {
                    try {
                        List<KeyWordForSentence> list = dtoList.stream().map(i -> (KeyWordForSentence) i).toList();
                        // Sniffer training; sentenceConfig.setShowLog(false) is honored here.
                        MyKeyWord mk = new MyKeyWord(sentenceConfig, wordEmbedding);
                        // FIX: the original stored the sniffer under the keyword id and the
                        // extractor under the classification id — the opposite of how
                        // parseSentence looks them up, so both lookups always missed.
                        keyParameterModelMapperVOS.add(new KeyParameterModelMapperVO(classificationId, mk.study(list)));
                        CatchKeyWord catchKeyWord = new CatchKeyWord();
                        catchKeyWord.study(list); // time-consuming step
                        keyWordModelMapperVOS.add(new KeyWordModelMapperVO(keywordId, catchKeyWord.getModel()));
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }));
        models.setKeyWord(keyWordModelMapperVOS);
        models.setKeyParameter(keyParameterModelMapperVOS);
        send(80);
    }

    /**
     * Rebuilds the MyKeyWord sniffers from a persisted model, filling the
     * supplied map keyed by the id each sniffer was stored under.
     */
    private void keyWordMapperMapDeserialize(SentenceConfig sentenceConfig, WordEmbedding wordEmbedding, LanguageModel model, Map<Integer, MyKeyWord> myKeyWordMap) throws Exception {
        for (KeyParameterModelMapperVO haveKey : model.getKeyParameter()) {
            MyKeyWord myKeyWord = new MyKeyWord(sentenceConfig, wordEmbedding);
            myKeyWord.insertModel(haveKey.getParameter());
            myKeyWordMap.put(haveKey.getKey(), myKeyWord);
        }
    }

    /**
     * Rebuilds the CatchKeyWord extractors from a persisted model, filling the
     * supplied map keyed by the id each extractor was stored under.
     */
    private void keyWordDeserialize(LanguageModel model, Map<Integer, CatchKeyWord> catchKeyWordMap) {
        for (KeyWordModelMapperVO keyWordModelMapping : model.getKeyWord()) {
            CatchKeyWord catchKeyWord = new CatchKeyWord();
            catchKeyWord.insertModel(keyWordModelMapping.getModel());
            catchKeyWordMap.put(keyWordModelMapping.getKey(), catchKeyWord);
        }
    }

    /**
     * Broadcasts a training-progress percentage to every connected WebSocket session.
     */
    private void send(int current) {
        WebSocketSessionManager.sendToAll(new SocketVO<>
                (ConstVar.Socket.PROGRESS, ProgressVO.set(current)));
    }


}
