package project.Service.Impl;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.web.client.RestTemplate;
import project.Mapper.ChatMapper;
import project.Mapper.WordMapper;
import project.Pojo.Chat.AI.ChatRecord;
import project.Pojo.Chat.AI.DeepSeekRequest;
import project.Pojo.Chat.AI.DeepSeekResponse;
import project.Service.WordService;
import project.Util.TextAnalyzer;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import project.Pojo.WordCount;
@Service
public class WordServiceImpl implements WordService {

    @Autowired
    private WordMapper wordMapper;

    @Autowired
    private ChatMapper chatMapper;

    @Autowired
    private RestTemplate restTemplate;

    /** Chat-completions endpoint of the DeepSeek-compatible API. */
    @Value("${deepseek.url}")
    private String apiUrl;

    /** API key; sent as a Bearer token in the Authorization header. */
    @Value("${deepseek.key}")
    private String apiKey;

    /** Model name forwarded in the request body. */
    @Value("${spring.ai.ollama.chat.model}")
    private String model;

    /** How many of the most frequent words are persisted per user. */
    private static final int TOP_WORD_LIMIT = 10;

    /**
     * Fetches the user's chat messages, segments them into words, and
     * replaces the user's stored word-frequency rows with the top
     * {@value #TOP_WORD_LIMIT} most frequent words.
     *
     * @param userName account whose chat history is analyzed
     */
    @Override
    public void processChatContent(String userName) {
        List<ChatRecord> records = chatMapper.getRecordByUserId(userName);

        // Count word occurrences across every message for this user.
        Map<String, Integer> counts = new HashMap<>();
        for (ChatRecord record : records) {
            for (String word : TextAnalyzer.segment(record.getMsgContent())) {
                counts.merge(word, 1, Integer::sum);
            }
        }

        // Drop the previous statistics before inserting the fresh ones.
        wordMapper.deleteByUserName(userName);

        // Persist only the most frequent words, highest count first.
        counts.entrySet().stream()
                .sorted(Map.Entry.<String, Integer>comparingByValue().reversed())
                .limit(TOP_WORD_LIMIT)
                .forEach(entry ->
                        wordMapper.insert(new WordCount(userName, entry.getKey(), entry.getValue(), null)));
    }

    /**
     * Returns the stored word-frequency details for the given account.
     *
     * @param username account to look up
     * @return the user's top words as persisted by {@link #processChatContent(String)}
     */
    @Override
    public List<WordCount> getWordCountByUserName(String username) {
        return wordMapper.selectTopWordsByUserName(username);
    }

    /**
     * Generates an AI evaluation: sends the user's top words to the
     * chat-completions API and returns the model's analysis text.
     *
     * <p>(Method name kept as declared by the {@code WordService} interface.)
     *
     * @param username account whose word statistics are evaluated
     * @return the content of the first choice returned by the API
     * @throws IllegalStateException if the API response has no body or no choices
     */
    @Override
    public String gerAiEvalution(String username) {
        List<WordCount> wordCounts = getWordCountByUserName(username);

        // Build the request body.
        DeepSeekRequest deepSeekRequest = new DeepSeekRequest();
        deepSeekRequest.setModel(model);

        List<DeepSeekRequest.Message> messages = new ArrayList<>();
        messages.add(new DeepSeekRequest.Message("" +
                "你是有丰富的心理有关知识的专家\n" +
                "你是用来辅助心理医生生成报告的，你要第三人称客观的分析数据内容" +
                "接下来我会截取与患者对话中出现的最多的10个名词，你对词进行分析，辅助心理医生" +
                "来分析病人的症状，给出一段分析内容，内容简洁",
                "system"));
        for (WordCount wc : wordCounts) {
            messages.add(new DeepSeekRequest.Message(wc.getWord() + " 出现次数：" + wc.getCount(), "user"));
        }
        deepSeekRequest.setMessages(messages);

        // Build the request headers (JSON body, bearer-token auth).
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON);
        headers.set("Authorization", "Bearer " + apiKey);

        // Wrap headers and body into one request entity.
        HttpEntity<DeepSeekRequest> entity = new HttpEntity<>(deepSeekRequest, headers);

        // Call the API.
        ResponseEntity<DeepSeekResponse> response = restTemplate.postForEntity(
                apiUrl,
                entity,
                DeepSeekResponse.class
        );

        // Guard against a missing body or empty choices instead of NPE-ing.
        DeepSeekResponse body = response.getBody();
        if (body == null || body.getChoices() == null || body.getChoices().isEmpty()) {
            throw new IllegalStateException("DeepSeek API returned no choices for user " + username);
        }
        return body.getChoices().get(0).getMessage().getContent();
    }

}

