package com.taritari.journal.services;

import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.date.DateUtil;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.taritari.journal.entity.dto.SentimentMqDto;
import com.taritari.journal.entity.po.WordCloud;
import com.taritari.journal.entity.po.WordCloudAnalyse;
import com.taritari.journal.entity.vo.ERNIEBotVo;
import com.taritari.journal.entity.vo.SentimentClassify;
import com.taritari.journal.entity.vo.SentimentClassifyV1Item;
import com.taritari.journal.entity.vo.SentimentClassifyV1Vo;
import com.taritari.journal.enums.ResultCode;
import com.taritari.journal.mapper.JournalsMapper;
import com.taritari.journal.mapper.SentimentClassifyMapper;
import com.taritari.journal.services.impl.SentimentClassifyServiceImpl;
import com.taritari.journal.utils.BaiDuApiUtil;
import com.taritari.journal.utils.DataUtil;
import com.taritari.journal.utils.IkAnalyzerUtils;
import com.taritari.journal.utils.Result;
import io.netty.util.internal.StringUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

/**
 * Sentiment-trend service.
 *
 * <p>Persists per-journal sentiment scores obtained from Baidu's
 * sentiment-classification API, builds word-cloud data from an author's
 * journals, and aggregates positive/negative averages per author.
 *
 * @author taritari
 * @date 2024-01-11 20:18
 * @description 情感趋势
 */
@Service
public class SentimentClassifyService extends ServiceImpl<SentimentClassifyMapper, SentimentClassify> implements SentimentClassifyServiceImpl {

    /** Redis key caching the Baidu NLP (sentiment) access token. */
    private static final String SENTIMENT_TOKEN_KEY = "accessToken";
    /** Redis key caching the Baidu QianFan (ERNIE Bot) access token. */
    private static final String QIANFAN_TOKEN_KEY = "qianFanaccessToken";
    /** Prompt prefix sent to ERNIE Bot; the journal text is appended verbatim. */
    private static final String NOT_TAG_PROMPT =
            "你现在是一个日记应用的情感分析助手，你需要根据日记内容在下例关键词中选出最不符合的一个关键词，然后把关键词返回给我。注意，只返回关键词。例如：关键词为情感，就只返回情感两个字,不要出现任何前缀说明。 关键词前不要带任何字。关键词:日常,情感,烦恼,吐槽,其他。 日记内容:";

    @Autowired
    private SentimentClassifyMapper sentimentClassifyMapper;
    @Autowired
    private DataUtil dataUtil;
    @Autowired
    private BaiDuApiUtil baiDuApiUtil;
    @Autowired
    private StringRedisTemplate stringRedisTemplate;
    @Autowired
    private IkAnalyzerUtils ikAnalyzerUtils;

    /**
     * Returns all sentiment records for one author.
     *
     * @param authorId the author's numeric id
     * @return every {@link SentimentClassify} row whose authorId matches
     *         (empty list when the author has none)
     */
    @Override
    public List<SentimentClassify> getSentimentClassifyById(int authorId) {
        LambdaQueryWrapper<SentimentClassify> wrapper = Wrappers.lambdaQuery();
        wrapper.eq(SentimentClassify::getAuthorId, authorId);
        return sentimentClassifyMapper.selectList(wrapper);
    }

    /**
     * Creates or updates the sentiment record for a journal entry.
     *
     * <p>Runs the journal text through Baidu's sentiment-classification API,
     * then inserts a new row or updates the existing one keyed by the
     * journal number.
     *
     * @param sentimentMqDto MQ payload carrying the journal number, text and author id
     * @return {@code Result.success} wrapping the number of rows affected
     * @throws IllegalStateException when the Baidu API response contains no items
     */
    @Override
    public Result updateSentimentClassify(SentimentMqDto sentimentMqDto) {
        // Does a sentiment row already exist for this journal number?
        int existingCount = sentimentClassifyMapper.querySentimentIsExit(sentimentMqDto.getNumber());

        String accessToken = resolveToken(SENTIMENT_TOKEN_KEY, false);

        Map<String, String> body = new HashMap<>(8);
        body.put("text", sentimentMqDto.getText());
        String requestJson = JSONObject.toJSONString(body);
        SentimentClassifyV1Vo response = baiDuApiUtil.baiDuSentimentClassify(accessToken, requestJson);

        // Guard: the original code indexed items.get(0) unconditionally and would
        // fail with a bare NPE/IndexOutOfBounds on an empty API response.
        if (response == null || response.getItems() == null || response.getItems().isEmpty()) {
            throw new IllegalStateException(
                    "Baidu sentiment API returned no items for journal " + sentimentMqDto.getNumber());
        }
        SentimentClassifyV1Item item = response.getItems().get(0);

        SentimentClassify sentimentClassify = new SentimentClassify();
        sentimentClassify.setAuthorId(Integer.parseInt(sentimentMqDto.getAuthorId()));
        sentimentClassify.setText(sentimentMqDto.getText());
        sentimentClassify.setJournalNumber(sentimentMqDto.getNumber());
        sentimentClassify.setSentiment(item.getSentiment());
        sentimentClassify.setConfidence(item.getConfidence());
        sentimentClassify.setNegativeProb(item.getNegative_prob());
        sentimentClassify.setPositiveProb(item.getPositive_prob());

        int affectedRows;
        if (existingCount > 0) {
            // Row exists: update in place, matched by journal number.
            LambdaQueryWrapper<SentimentClassify> wrapper = Wrappers.lambdaQuery();
            wrapper.eq(SentimentClassify::getJournalNumber, sentimentMqDto.getNumber());
            affectedRows = sentimentClassifyMapper.update(sentimentClassify, wrapper);
        } else {
            affectedRows = sentimentClassifyMapper.insert(sentimentClassify);
        }
        return Result.success(affectedRows);
    }

    /**
     * Fetches the sentiment detail for a single journal entry.
     *
     * @param number the journal number
     * @return the matching {@link SentimentClassify}, or {@code null} if none exists
     */
    @Override
    public SentimentClassify selectOneByJournalNumber(String number) {
        LambdaQueryWrapper<SentimentClassify> wrapper = Wrappers.lambdaQuery();
        wrapper.eq(SentimentClassify::getJournalNumber, number);
        return sentimentClassifyMapper.selectOne(wrapper);
    }

    /**
     * Builds word-cloud data from all of an author's journal texts.
     *
     * @param authorId the author's numeric id
     * @return a single-entry map keyed "series" (ECharts-style payload)
     *         holding up to 25 word-cloud terms
     */
    @Override
    public Map<String, List<WordCloud>> selectJournalNumberByAuthorId(int authorId) {
        List<String> journalTexts = sentimentClassifyMapper.queryJournalNumberByAuthorId(authorId);
        // Tokenize and rank; top 25 terms are returned as {name, ...} maps.
        List<Map<String, Object>> rankedTerms = ikAnalyzerUtils.wordCloudList(journalTexts, 25);

        List<WordCloud> wordClouds = new ArrayList<>(rankedTerms.size());
        for (Map<String, Object> term : rankedTerms) {
            // NOTE(review): only "name" is copied — if the analyzer also emits a
            // frequency/"value" entry it is silently dropped here; confirm intended.
            WordCloud wordCloud = new WordCloud();
            wordCloud.setName(term.get("name").toString());
            wordClouds.add(wordCloud);
        }

        Map<String, List<WordCloud>> wordCloudMap = new HashMap<>(8);
        wordCloudMap.put("series", wordClouds);
        return wordCloudMap;
    }

    /**
     * Asks ERNIE Bot which tag LEAST matches today's journal content, so the
     * "tree hole" feed can hide that tag for this user. The chosen tag is
     * cached in Redis under the author id for 24 hours.
     *
     * @param content  today's journal text
     * @param authorId author id, also used as the Redis cache key
     * @return the tag returned by the model
     * @throws IOException propagated from the QianFan token/API call
     */
    public String getNotTag(String content, String authorId) throws IOException {
        String accessToken = resolveToken(QIANFAN_TOKEN_KEY, true);

        // Build the single-turn chat payload expected by the ERNIE Bot API.
        Map<String, String> userContent = new HashMap<>(8);
        userContent.put("role", "user");
        userContent.put("content", NOT_TAG_PROMPT + content);

        List<Map<String, String>> messages = new ArrayList<>();
        messages.add(userContent);
        Map<String, Object> payload = new HashMap<>(8);
        payload.put("messages", messages);

        ERNIEBotVo ernieBotVo = baiDuApiUtil.wenXinWorkShop(accessToken, JSONObject.toJSONString(payload));
        String tag = ernieBotVo.getResult();
        // Cache for one day (was 24 * 60 * 60 seconds).
        stringRedisTemplate.opsForValue().set(authorId, tag, 24, TimeUnit.HOURS);
        return tag;
    }

    /**
     * Aggregates an author's sentiment rows into average positive/negative
     * percentages, shaped for an ECharts pie/series payload.
     *
     * @param id the author's numeric id
     * @return map keyed "series", or {@code null} when the author has no
     *         sentiment rows (kept for caller compatibility — callers already
     *         null-check; prefer not to change to an empty map here)
     */
    public Map<String, List<Object>> getSentimentCountById(int id) {
        List<SentimentClassify> records = getSentimentClassifyById(id);
        if (records.isEmpty()) {
            return null;
        }

        double positiveSum = 0;
        double negativeSum = 0;
        for (SentimentClassify record : records) {
            positiveSum += record.getPositiveProb();
            negativeSum += record.getNegativeProb();
        }
        int size = records.size();
        // Averages as percentages, rounded to three decimals by DataUtil.
        double positive = dataUtil.threeDecimal((positiveSum / size) * 100);
        double negative = dataUtil.threeDecimal((negativeSum / size) * 100);

        Map<String, Object> positiveMap = new HashMap<>(8);
        positiveMap.put("name", "积极");
        positiveMap.put("value", positive);
        Map<String, Object> negativeMap = new HashMap<>(8);
        negativeMap.put("name", "消极");
        negativeMap.put("value", negative);

        List<Object> dataList = new ArrayList<>();
        dataList.add(positiveMap);
        dataList.add(negativeMap);

        Map<String, List<Object>> dataMap = new HashMap<>(8);
        dataMap.put("data", dataList);
        List<Object> seriesList = new ArrayList<>();
        seriesList.add(dataMap);

        Map<String, List<Object>> sentimentMap = new HashMap<>(8);
        sentimentMap.put("series", seriesList);
        return sentimentMap;
    }

    /**
     * Returns a cached API access token from Redis, fetching a fresh one from
     * Baidu when the cache is empty. Centralizes the lookup previously
     * duplicated in {@code updateSentimentClassify} and {@code getNotTag}.
     *
     * @param redisKey Redis key holding the token
     * @param qianFan  {@code true} for the QianFan token, {@code false} for
     *                 the sentiment-API token
     * @return a usable access token
     * @throws IOException propagated from the QianFan token fetch
     */
    private String resolveToken(String redisKey, boolean qianFan) throws IOException {
        String token = stringRedisTemplate.opsForValue().get(redisKey);
        if (StringUtil.isNullOrEmpty(token)) {
            token = qianFan ? baiDuApiUtil.getQianFanToken() : baiDuApiUtil.getAccessToken();
        }
        return token;
    }
}
