package ai.people.netmon.warning.service.impl;

import ai.people.core.grpc.lib.dict.DictServiceGrpc;
import ai.people.netmon.framework.domain.common.NameValueEntity;
import ai.people.netmon.framework.domain.common.TypeValueEntity;
import ai.people.netmon.framework.domain.warning.entity.WarningSubjectAnalysis;
import ai.people.netmon.framework.domain.warning.request.subject.SubjectModelRequest;
import ai.people.netmon.framework.domain.warning.request.subject.SubjectWarningRequest;
import ai.people.netmon.framework.domain.warning.vo.MessageContentVO;
import ai.people.netmon.framework.domain.warning.vo.TrendAnalysisRespVO;
import ai.people.netmon.framework.exception.enums.TopicExceptionEnum;
import ai.people.netmon.framework.model.response.PageResponse;
import ai.people.netmon.framework.utils.AssertUtils;
import ai.people.netmon.utils.localdateUtil.DateTimeFormatterUtil;
import ai.people.netmon.warning.mapper.WarningSubjectAnalysisMapper;
import ai.people.netmon.warning.service.WarningSpeechTemplateService;
import ai.people.netmon.warning.service.WarningSubjectAnalysisService;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.huaban.analysis.jieba.keyword.Keyword;
import com.huaban.analysis.jieba.keyword.TFIDFAnalyzer;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import net.devh.boot.grpc.client.inject.GrpcClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StopWatch;
import org.springframework.util.StringUtils;

import javax.annotation.PostConstruct;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

/**
 * @author ligua
 * @description 针对表【warning_subject_analysis(预警主题分析关系表)】的数据库操作Service实现
 * @createDate 2022-07-28 17:50:38
 */
@Slf4j
@Service
@RequiredArgsConstructor
public class WarningSubjectAnalysisServiceImpl extends ServiceImpl<WarningSubjectAnalysisMapper, WarningSubjectAnalysis>
        implements WarningSubjectAnalysisService {

    /** Day-granularity formatter shared by trend/tag analysis; DateTimeFormatter is immutable and thread-safe. */
    private static final DateTimeFormatter DAY_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd");

    /** Single page pulled from the search service for analysis (effectively "all" rows for one subject). */
    private static final int ANALYSIS_PAGE_SIZE = 10000;

    /** Number of top keywords kept per document and in the final ranking. */
    private static final int KEYWORD_TOP_N = 30;

    /** Toggle for the in-process analysis result cache (property: warning.analysis.cache.switch). */
    @Value("${warning.analysis.cache.switch}")
    private Boolean analysisCacheSwitch;

    @GrpcClient(value = "NM-SERVICE-SYSTEM")
    private DictServiceGrpc.DictServiceBlockingStub dictServiceBlockingStub;

    private final WarningSubjectAnalysisMapper warningSubjectAnalysisMapper;

    /** Delegated search of subject messages; injected by bean name to pick the subject implementation. */
    @Autowired
    private WarningSpeechTemplateService<SubjectWarningRequest, MessageContentVO> warningSubjectServiceImpl;

    /** Per-subject/per-range cache of fetched messages, expiring daily. */
    Cache<String, List<MessageContentVO>> caffeineCache = null;

    @PostConstruct
    public void init() {
        // NOTE(review): maximumSize is an ENTRY count, not bytes — 1024*1024*50 (~52M entries)
        // looks far larger than intended; confirm, or switch to maximumWeight with a weigher.
        caffeineCache = Caffeine.newBuilder()
                .expireAfterWrite(1, TimeUnit.DAYS)
                .maximumSize(1024 * 1024 * 50)
                .build();
    }

    /**
     * 添加主题分析 — inserts a subject/user analysis relation row.
     *
     * @param warningSubjectAnalysis 警告主题分析 relation entity to persist
     * @throws RuntimeException via AssertUtils with FAILED_TO_JOIN_ANALYSIS when the insert affects no rows
     */
    @Override
    public void addSubjectAnalysis(WarningSubjectAnalysis warningSubjectAnalysis) {
        int insertResult = warningSubjectAnalysisMapper.insert(warningSubjectAnalysis);
        AssertUtils.isTrue(insertResult > 0, TopicExceptionEnum.FAILED_TO_JOIN_ANALYSIS);
    }

    /**
     * 删除主题分析 — removes the analysis relation for the given user and subject.
     * Deletes directly by condition: this avoids the extra SELECT round trip and the
     * TooManyResultsException that selectOne throws if duplicate relation rows exist.
     * No-op when nothing matches (idempotent).
     *
     * @param userId    用户id
     * @param subjectId 主题id
     */
    @Override
    public void deleteSubjectAnalysis(Long userId, Long subjectId) {
        LambdaQueryWrapper<WarningSubjectAnalysis> lambdaQuery = Wrappers.lambdaQuery(WarningSubjectAnalysis.class)
                .eq(WarningSubjectAnalysis::getAnalysisSubjectId, subjectId)
                .eq(WarningSubjectAnalysis::getAnalysisUserId, userId);
        warningSubjectAnalysisMapper.delete(lambdaQuery);
    }

    /**
     * Fetches the subject's messages for the requested date range, consulting the
     * Caffeine cache only when the cache switch is enabled.
     * <p>
     * Fixes vs. previous version: the cache is neither read NOR written when
     * {@code analysisCacheSwitch} is off (previously it was always populated), and the
     * boxed Boolean is compared with {@code Boolean.TRUE.equals} to avoid an unboxing NPE.
     *
     * @param reqParam subject id plus optional start/end dates
     * @return messages for the subject; never null, possibly empty
     */
    private List<MessageContentVO> getMessageContentVos(SubjectModelRequest reqParam) {
        boolean cacheEnabled = Boolean.TRUE.equals(analysisCacheSwitch);
        // Key encodes subject + range as epoch millis; 0 stands for an open bound.
        String cacheKey = String.format("warning:analysis:subject:%s:%s-%s", reqParam.getSubjectId(),
                reqParam.getStartDate() != null ? reqParam.getStartDate().atZone(ZoneId.systemDefault()).toInstant().toEpochMilli() : 0,
                reqParam.getEndDate() != null ? reqParam.getEndDate().atZone(ZoneId.systemDefault()).toInstant().toEpochMilli() : 0);
        if (cacheEnabled) {
            List<MessageContentVO> cached = caffeineCache.getIfPresent(cacheKey);
            if (cached != null) {
                return cached;
            }
        }
        List<MessageContentVO> vos = Lists.newArrayList();
        SubjectWarningRequest subjectWarningRequest = new SubjectWarningRequest();
        subjectWarningRequest.setSubjectId(reqParam.getSubjectId());
        if (reqParam.getStartDate() != null) {
            subjectWarningRequest.setStartTime(reqParam.getStartDate());
        }
        if (reqParam.getEndDate() != null) {
            subjectWarningRequest.setEndTime(reqParam.getEndDate());
        }
        subjectWarningRequest.setPageNo(0);
        subjectWarningRequest.setPageSize(ANALYSIS_PAGE_SIZE);
        PageResponse<MessageContentVO> listPageResponse = warningSubjectServiceImpl.searchQuery(subjectWarningRequest);
        if (listPageResponse != null && !CollectionUtils.isEmpty(listPageResponse.getList())) {
            vos = (List<MessageContentVO>) listPageResponse.getList();
        }
        if (cacheEnabled) {
            caffeineCache.put(cacheKey, vos);
        }
        return vos;
    }

    /**
     * 情感分析 — counts messages per emotion label, skipping blank emotions.
     *
     * @param reqParam subject id plus optional date range
     * @return one (emotion, count) entry per distinct emotion; empty list when no data
     */
    @Override
    public List<TypeValueEntity<String, Long>> emotionAnalysis(SubjectModelRequest reqParam) {
        StopWatch stopWatch = new StopWatch();
        stopWatch.start(String.format("查询主题下的舆情数据， SubjectModelRequest:%s", reqParam));
        List<TypeValueEntity<String, Long>> list = Lists.newArrayList();
        List<MessageContentVO> vos = getMessageContentVos(reqParam);
        stopWatch.stop();
        stopWatch.start(String.format("主题情感分析， SubjectModelRequest:%s", reqParam));
        if (!CollectionUtils.isEmpty(vos)) {
            vos.stream()
                    .filter(o -> !StringUtils.isEmpty(o.getEmotion()))
                    .collect(Collectors.groupingBy(MessageContentVO::getEmotion, Collectors.counting()))
                    // entry iteration instead of keySet + get: single lookup per key
                    .forEach((emotion, count) ->
                            list.add(TypeValueEntity.<String, Long>builder().type(emotion).value(count).build()));
        }
        stopWatch.stop();
        log.info("情感分析耗时分析：\n{}", stopWatch.prettyPrint());
        return list;
    }

    /**
     * 来源分析 — counts messages per platform, skipping blank platforms.
     *
     * @param reqParam subject id plus optional date range
     * @return one (platform, count) entry per distinct platform; empty list when no data
     */
    @Override
    public List<TypeValueEntity<String, Long>> sourceAnalysis(SubjectModelRequest reqParam) {
        List<TypeValueEntity<String, Long>> list = Lists.newArrayList();
        List<MessageContentVO> vos = getMessageContentVos(reqParam);
        if (!CollectionUtils.isEmpty(vos)) {
            vos.stream()
                    .filter(o -> !StringUtils.isEmpty(o.getPlatform()))
                    .collect(Collectors.groupingBy(MessageContentVO::getPlatform, Collectors.counting()))
                    .forEach((platform, count) ->
                            list.add(TypeValueEntity.<String, Long>builder().type(platform).value(count).build()));
        }
        return list;
    }

    /**
     * 趋势分析 — per-emotion daily counts, gap-padded with zero-value days by {@link #paddingList}.
     * <p>
     * Fixes vs. previous version: messages with a null time are filtered out before
     * formatting (previously an NPE risk — tagAnalysis already had this guard), the
     * redundant second emotion filter inside the per-emotion stream is dropped, and the
     * day formatter is the shared constant instead of being rebuilt per element.
     *
     * @param reqParam subject id plus optional date range
     * @return (category=emotion, year=yyyy-MM-dd, value=count) rows, sorted by day
     */
    @Override
    public List<TrendAnalysisRespVO> trendAnalysis(SubjectModelRequest reqParam) {
        List<TrendAnalysisRespVO> resultList = Lists.newArrayList();
        List<MessageContentVO> vos = getMessageContentVos(reqParam);
        if (!CollectionUtils.isEmpty(vos)) {
            vos.stream()
                    .filter(o -> Objects.nonNull(o.getTime()) && !StringUtils.isEmpty(o.getEmotion()))
                    .collect(Collectors.groupingBy(MessageContentVO::getEmotion))
                    .forEach((emotion, messages) ->
                            messages.stream()
                                    .collect(Collectors.groupingBy(o -> DAY_FORMATTER.format(o.getTime()), Collectors.counting()))
                                    .forEach((day, count) -> resultList.add(
                                            TrendAnalysisRespVO.builder().category(emotion).year(day).value(count).build())));
        }
        return paddingList(resultList);
    }

    /**
     * 关键词分析 — TF-IDF keyword extraction over message contents, summing each
     * keyword's score across documents and returning the top {@value #KEYWORD_TOP_N}.
     * <p>
     * The analyzer is constructed once and reused: the jieba TFIDFAnalyzer loads its IDF
     * dictionary at construction, so per-document construction was the hot spot.
     * NOTE(review): assumes analyze() leaves the analyzer reusable across calls — confirm
     * against the jieba-analysis implementation.
     *
     * @param reqParam subject id plus optional date range
     * @return top keywords with accumulated TF-IDF weight, highest first; empty list when no data
     */
    @Override
    public List<NameValueEntity<String, Double>> keywordAnalysis(SubjectModelRequest reqParam) {
        List<NameValueEntity<String, Double>> list = Lists.newArrayList();
        List<MessageContentVO> vos = getMessageContentVos(reqParam);
        Map<String, Double> keywordMap = Maps.newHashMap();
        if (!CollectionUtils.isEmpty(vos)) {
            TFIDFAnalyzer tfidfAnalyzer = new TFIDFAnalyzer();
            vos.stream()
                    .filter(o -> !StringUtils.isEmpty(o.getContent()))
                    .map(MessageContentVO::getContent)
                    .forEach(content -> tfidfAnalyzer.analyze(content, KEYWORD_TOP_N)
                            // merge replaces the manual get / null-check / put accumulation
                            .forEach(keyword -> keywordMap.merge(keyword.getName(), keyword.getTfidfvalue(), Double::sum)));
        }
        if (!CollectionUtils.isEmpty(keywordMap)) {
            list = keywordMap.entrySet().stream()
                    .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder()))
                    .limit(KEYWORD_TOP_N)
                    .map(o -> NameValueEntity.<String, Double>builder().name(o.getKey()).value(o.getValue()).build())
                    .collect(Collectors.toList());
        }
        return list;
    }

    /**
     * 标签分析 — per-day counts of comma-separated labels, gap-padded by {@link #paddingList}.
     *
     * @param reqParam subject id plus optional date range
     * @return (category=tag, year=yyyy-MM-dd, value=count) rows, sorted by day
     */
    @Override
    public List<TrendAnalysisRespVO> tagAnalysis(SubjectModelRequest reqParam) {
        List<TrendAnalysisRespVO> resultList = Lists.newArrayList();
        List<MessageContentVO> vos = getMessageContentVos(reqParam);
        if (!CollectionUtils.isEmpty(vos)) {
            vos.stream()
                    .filter(o -> Objects.nonNull(o.getTime()) && !StringUtils.isEmpty(o.getLabel()))
                    .collect(Collectors.groupingBy(o -> DAY_FORMATTER.format(o.getTime())))
                    .forEach((day, messages) -> {
                        // Label column holds comma-separated tags; split, trim, and count per day.
                        List<String> tags = Lists.newArrayList();
                        messages.forEach(o -> tags.addAll(Arrays.asList(o.getLabel().split(","))));
                        tags.stream()
                                .collect(Collectors.groupingBy(String::trim, Collectors.counting()))
                                .forEach((tag, count) -> resultList.add(
                                        TrendAnalysisRespVO.builder().category(tag).year(day).value(count).build()));
                    });
        }
        return paddingList(resultList);
    }

    /**
     * Pads each category's day series so charts do not draw lines across gaps:
     * whenever two consecutive data points are more than one day apart, a single
     * zero-value point is inserted on the day after the earlier point and on the day
     * before the later point. Result is re-sorted by day across all categories.
     * (Intentionally inserts only the two boundary zeros, not every missing day.)
     *
     * @param resultList raw (category, day, count) rows
     * @return padded rows sorted by day; empty list for empty input
     */
    private List<TrendAnalysisRespVO> paddingList(List<TrendAnalysisRespVO> resultList) {
        List<TrendAnalysisRespVO> vos = Lists.newArrayList();
        if (!CollectionUtils.isEmpty(resultList)) {
            Map<String, List<TrendAnalysisRespVO>> categoryMap = resultList.stream().collect(Collectors.groupingBy(TrendAnalysisRespVO::getCategory));
            categoryMap.forEach((category, list) -> {
                list.sort(Comparator.comparing(TrendAnalysisRespVO::getYear));
                for (int i = 0; i < list.size(); i++) {
                    TrendAnalysisRespVO currentVo = list.get(i);
                    LocalDate currentDate = DateTimeFormatterUtil.stringDateFormatterToLocalDate(currentVo.getYear());
                    vos.add(currentVo);
                    if (i != 0) {
                        // Gap behind us: add a zero on the day before the current point.
                        TrendAnalysisRespVO preVo = list.get(i - 1);
                        if (isIntervalGreaterThanOneDay(preVo, currentVo)) {
                            vos.add(TrendAnalysisRespVO.builder().category(category).year(DateTimeFormatterUtil.localDateFormatterToStringDate(currentDate.plusDays(-1))).value(0L).build());
                        }
                    }
                    if (i != list.size() - 1) {
                        // Gap ahead of us: add a zero on the day after the current point.
                        TrendAnalysisRespVO nextVo = list.get(i + 1);
                        if (isIntervalGreaterThanOneDay(currentVo, nextVo)) {
                            vos.add(TrendAnalysisRespVO.builder().category(category).year(DateTimeFormatterUtil.localDateFormatterToStringDate(currentDate.plusDays(1))).value(0L).build());
                        }
                    }
                }
            });
            vos.sort(Comparator.comparing(TrendAnalysisRespVO::getYear));
        }
        return vos;
    }

    /**
     * 是否间隔大于一天 — true when the gap between the two rows' days exceeds one day
     * (i.e. currentVo's day + 1 is still strictly before nextVo's day).
     *
     * @param currentVo earlier row (day taken from {@code getYear()}, yyyy-MM-dd)
     * @param nextVo    later row
     * @return true if the two days are more than one calendar day apart
     */
    private boolean isIntervalGreaterThanOneDay(TrendAnalysisRespVO currentVo, TrendAnalysisRespVO nextVo) {
        return DateTimeFormatterUtil.stringDateFormatterToLocalDate(currentVo.getYear()).plusDays(1)
                .isBefore(DateTimeFormatterUtil.stringDateFormatterToLocalDate(nextVo.getYear()));
    }

}




