package com.taritari.journal.services;

import cn.hutool.core.date.DateUtil;
import cn.hutool.json.JSONUtil;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.taritari.journal.entity.po.JournalUser;
import com.taritari.journal.entity.po.TestPo;
import com.taritari.journal.entity.po.WordCloudAnalyse;
import com.taritari.journal.entity.vo.ERNIEBotVo;
import com.taritari.journal.entity.vo.PromptAndDescVo;
import com.taritari.journal.mapper.JournalUserMapper;
import com.taritari.journal.mapper.SentimentClassifyMapper;
import com.taritari.journal.mapper.TestMapper;
import com.taritari.journal.mapper.WordCloudAnalyseMapper;
import com.taritari.journal.services.impl.TestServiceImpl;
import com.taritari.journal.services.impl.WordCloudAnalyseServiceImpl;
import com.taritari.journal.utils.BaiDuApiUtil;
import com.taritari.journal.utils.IkAnalyzerUtils;
import com.taritari.journal.utils.Result;
import io.netty.util.internal.StringUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @author taritari
 * @date 2024-05-11 19:51
 * @description Builds a daily word-cloud / sentiment analysis for a user's journal
 *              entries via the Baidu ERNIE (WenXin) chat API, persists it, and can
 *              mail the resulting report to the user.
 */
@Slf4j
@Service
public class WordCloudAnalyseService extends ServiceImpl<WordCloudAnalyseMapper, WordCloudAnalyse> implements WordCloudAnalyseServiceImpl {
    @Autowired
    private StringRedisTemplate stringRedisTemplate;
    @Autowired
    private BaiDuApiUtil baiDuApiUtil;
    @Autowired
    private SentimentClassifyMapper sentimentClassifyMapper;
    @Autowired
    private IkAnalyzerUtils ikAnalyzerUtils;
    @Autowired
    private WordCloudAnalyseMapper wordCloudAnalyseMapper;
    @Autowired
    private MyMailService myMailService;
    @Autowired
    private JournalUserMapper journalUserMapper;

    /**
     * Builds a chat prompt from the word-cloud and sentiment data, sends it to the
     * ERNIE bot, and maps the model's JSON answer onto a {@link WordCloudAnalyse}.
     *
     * @param wordCloud serialized word-cloud data derived from the user's journals
     * @param sentiment serialized sentiment-index data
     * @return the analysis parsed from the model reply (theme / analyse / content / suggest)
     * @throws IllegalStateException if the model reply is empty or contains no JSON object
     */
    public WordCloudAnalyse getWordCloudAnalyse(String wordCloud, String sentiment) {
        // Fetch the cached Baidu QianFan access token from Redis.
        String accessToken = stringRedisTemplate.opsForValue().get("qianFanaccessToken");
        // Token missing or expired — request a fresh one from the API.
        if (StringUtil.isNullOrEmpty(accessToken)) {
            accessToken = baiDuApiUtil.getQianFanToken();
        }
        // Assemble the chat request payload: a single "user" role message.
        List<Map<String, String>> dataList = new ArrayList<>();
        Map<String, String> userContent = new HashMap<>(8);
        Map<String, Object> dataMap = new HashMap<>(8);
        userContent.put("role", "user");
        // Build the concrete prompt. StringBuilder suffices — no shared mutable state here.
        StringBuilder messages = new StringBuilder("你现在是日记应用的心情邮件管理员，需要通过我的日记词云数据：");
        messages.append(wordCloud);
        messages.append(",和心情指数：");
        messages.append(sentiment);
        messages.append(",对数据进行分析，然后根据我的要求响应。主题需积极温暖，分析写心情数据的分析，内容迎合主题，风格温柔，然后提出建议。注意：生成的文本中不要出现任何词云数据和心情指数数据，年需要从朋友的角度来写文章内容。我只要主题，分析，总结，建议这四部分内容，不需要返回其他任何内容，需要以json格式发给我：{\n" +
                "\"theme\": \"\",analyse:\"\",\n" +
                "\"content\": \"\",\n" +
                "\"suggest\":\"\":\n" +
                "}");
        userContent.put("content", messages.toString());
        dataList.add(userContent);
        dataMap.put("messages", dataList);
        String jsonString = JSONObject.toJSONString(dataMap);
        ERNIEBotVo ernieBotVo = baiDuApiUtil.wenXinWorkShop(accessToken, jsonString);
        String commentContent = ernieBotVo.getResult();
        // The model usually wraps its JSON in a markdown code fence. Extract by brace
        // positions rather than the fragile substring(7, length - 4), which threw
        // StringIndexOutOfBoundsException whenever the fencing varied.
        cn.hutool.json.JSONObject jsonObject = JSONUtil.parseObj(extractJsonObject(commentContent));
        return jsonObject.toBean(WordCloudAnalyse.class);
    }

    /**
     * Strips any markdown fencing / surrounding text from the model reply and
     * returns the first-to-last-brace JSON object text.
     *
     * @param content raw model reply, possibly fenced with ```json ... ```
     * @return the JSON object substring
     * @throws IllegalStateException if the reply is empty or has no JSON object
     */
    private String extractJsonObject(String content) {
        if (StringUtil.isNullOrEmpty(content)) {
            throw new IllegalStateException("ERNIE bot returned an empty result");
        }
        int start = content.indexOf('{');
        int end = content.lastIndexOf('}');
        if (start < 0 || end <= start) {
            throw new IllegalStateException("ERNIE bot result contains no JSON object");
        }
        return content.substring(start, end + 1);
    }

    /**
     * Returns today's word-cloud analysis for the given author, generating and
     * persisting a new one if none exists yet.
     *
     * @param authorId id of the journal author
     * @return today's (possibly freshly generated) analysis
     */
    @Override
    public WordCloudAnalyse getAnalyse(Integer authorId) {
        // Reuse today's cached row if it was already generated.
        WordCloudAnalyse wordCloudData = generateWordCloudByAuthorId(authorId);
        if (wordCloudData != null) {
            log.info("Word-cloud analysis for author {} already exists for today", authorId);
            return wordCloudData;
        }
        List<String> wordList = sentimentClassifyMapper.queryJournalNumberByAuthorId(authorId);
        // Keep the top 25 tokens for the word cloud.
        String wordCloud = ikAnalyzerUtils.wordCloudParsing(wordList, 25);
        Map<String, Double> stringDoubleMap = sentimentClassifyMapper.querySentimentByAuthorId(authorId);

        WordCloudAnalyse wordCloudAnalyse = getWordCloudAnalyse(wordCloud, stringDoubleMap.toString());
        wordCloudAnalyse.setUserId(authorId);
        wordCloudAnalyse.setWordCloud(wordCloud);
        wordCloudAnalyse.setCreateTime(DateUtil.date());
        // Surface a failed insert instead of silently discarding the return value.
        if (wordCloudAnalyseMapper.insert(wordCloudAnalyse) != 1) {
            log.warn("Failed to persist word-cloud analysis for author {}", authorId);
        }
        return wordCloudAnalyse;
    }

    /**
     * Sends today's word-cloud report to the author's registered e-mail address.
     *
     * @param authorId id of the journal author
     * @return the mail service's result
     */
    @Override
    public Result sendWordCloudEmail(Integer authorId) {
        // Use getAnalyse (not the raw query) so a missing row is generated on demand
        // instead of passing null into the mail service.
        WordCloudAnalyse wordCloudAnalyse = getAnalyse(authorId);
        JournalUser journalUser = journalUserMapper.selectById(authorId);
        return myMailService.sendWordCloudReport(wordCloudAnalyse, journalUser.getEmail());
    }

    /**
     * Looks up today's word-cloud analysis row for an author.
     *
     * @param authorId id of the journal author
     * @return today's analysis, or {@code null} if none was generated yet today
     */
    @Override
    public WordCloudAnalyse generateWordCloudByAuthorId(Integer authorId) {
        // Today's date as yyyy-MM-dd; matched with LIKE against the datetime column.
        String today = DateUtil.today();
        LambdaQueryWrapper<WordCloudAnalyse> queryWrapper = new LambdaQueryWrapper<>();
        queryWrapper.eq(WordCloudAnalyse::getUserId, authorId);
        queryWrapper.like(WordCloudAnalyse::getCreateTime, today);
        return wordCloudAnalyseMapper.selectOne(queryWrapper);
    }
}
