package com.orange.project.news.service.impl;

import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson.JSON;
import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.seg.common.Term;
import com.orange.agent.rebangTody.RebangAgentApi;
import com.orange.agent.todayNews.TodayNewsAgentApi;
import com.orange.bean.core.constant.CacheConstants;
import com.orange.bean.core.constant.NewsConstants;
import com.orange.bean.rebangTody.dto.request.HotNewsReq;
import com.orange.bean.rebangTody.dto.response.HotNewsRes;
import com.orange.bean.rebangTody.po.NewsPo;
import com.orange.bean.rebangTody.vo.NewsListRes;
import com.orange.bean.todayNews.dto.request.NowNewsReq;
import com.orange.bean.todayNews.dto.response.NowNewsDto;
import com.orange.bean.todayNews.dto.response.NowNewsRes;
import com.orange.bean.todayNews.dto.response.WordCloudRes;
import com.orange.common.base.entity.RequestData;
import com.orange.common.base.entity.ResHeader;
import com.orange.common.base.entity.ResponseData;
import com.orange.common.base.service.impl.BaseServiceImpl;
import com.orange.common.enums.ResponseStatus;
import com.orange.common.enums.ResultCodeEnum;
import com.orange.common.exception.AgentApiException;
import com.orange.common.exception.BusinessException;
import com.orange.project.news.NewsHotspotAnalyzer;
import com.orange.project.news.mapper.NewsMapper;
import com.orange.project.news.service.NewsService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

/**
 * News service implementation: fetches hot / real-time news through the agent
 * APIs, caches the resulting lists in Redis, and derives word-cloud and hotspot
 * statistics from the cached data.
 *
 * @Author YWF
 * @Date 2025/8/11 11:43
 */
@Service
public class NewsServiceImpl extends BaseServiceImpl<NewsMapper, NewsPo> implements NewsService {

    protected static final Logger logger = LoggerFactory.getLogger(NewsServiceImpl.class);

    /**
     * Pattern for hot-number strings such as "23.6万热度", "230000", "45.8w", "123万".
     * {@link Pattern} is immutable and thread-safe, so it is compiled once here
     * instead of on every {@code convertHotNum} call as before.
     */
    private static final Pattern HOT_NUM_PATTERN =
            Pattern.compile("(\\d+(?:\\.\\d+)?)\\s*(万|w|W)?\\s*[热度]*");

    /**
     * Chinese stop words excluded from word-cloud statistics. Built once instead of
     * allocating a fresh HashSet on every {@code isStopWord} invocation.
     */
    private static final Set<String> STOP_WORDS = new HashSet<>(Arrays.asList(
            "的", "了", "在", "是", "我", "有", "和", "就",
            "不", "人", "都", "一", "一个", "上", "也", "很",
            "到", "说", "要", "去", "你", "会", "着", "没有",
            "看", "好", "自己", "这"));

    @Resource
    private RebangAgentApi newsAgentApi;

    @Resource
    private TodayNewsAgentApi todayNewsAgentApi;

    // NOTE(review): the unused instance field "resData" was removed. A singleton
    // @Service must not carry per-request state in fields, and nothing read it.

    /**
     * Fetches the hot-news list from the remote "rebang" agent.
     *
     * @param hotNewsReq request forwarded to the agent
     * @return the response body on SUCCESS
     * @throws AgentApiException when the agent reports FAILD (carries the response header)
     * @throws BusinessException for any other, unexpected response status
     */
    @Override
    public HotNewsRes getNewsListForNet(HotNewsReq hotNewsReq) {
        RequestData<HotNewsReq> requestData = RequestData.of(hotNewsReq);
        ResponseData<HotNewsRes> responseData = newsAgentApi.getNewsList(requestData);

        if (responseData.getStatus() == ResponseStatus.SUCCESS) {
            return responseData.getBody();
        }
        if (responseData.getStatus() == ResponseStatus.FAILD) {
            throw new AgentApiException(responseData.getHeader());
        }
        throw new BusinessException(ResultCodeEnum.GET_NEWS_ERROR);
    }

    /**
     * Reads the whole cached hot-news list from Redis.
     *
     * @return cached list; never null (empty list when the cache key is absent)
     */
    @Override
    public List<NewsListRes> getCacheNewsList() {
        // redisCache is inherited from BaseServiceImpl (declaration not visible here).
        List<NewsListRes> newsList = redisCache.lGet(CacheConstants.HOT_NEWS_LIST, 0, -1);
        if (newsList == null) {
            // Guard: the original would have thrown an NPE on newsList.size().
            return Collections.emptyList();
        }
        logger.info("缓存中的新闻列表，数据：{}", newsList.size());
        return newsList;
    }

    /**
     * Reads one page of the cached hot-news list from Redis.
     *
     * @param pageNum  page index, passed straight through to the cache helper
     * @param pageSize page size; values &lt;= 0 fall back to 10
     * @return cached page; never null
     */
    @Override
    public List<NewsListRes> getCacheNewsPageList(int pageNum, int pageSize) {
        pageSize = pageSize <= 0 ? 10 : pageSize;
        List<NewsListRes> newsList = redisCache.lGetPage(CacheConstants.HOT_NEWS_LIST, pageNum, pageSize);
        if (newsList == null) {
            return Collections.emptyList();
        }
        logger.info("缓存中的新闻列表，数据：{}", newsList.size());
        return newsList;
    }

    /**
     * Fetches the "now news" list for one channel from the today-news agent and
     * maps each item to the view object used by the front end.
     *
     * @param id channel id (a key of {@code NewsConstants.NEWS_TYPE})
     * @return mapped list, or an empty list when the agent returns no data
     * @throws AgentApiException when the agent reports FAILD
     */
    @Override
    public List<NewsListRes> getNowNewsList(String id) {
        RequestData<NowNewsReq> requestData = RequestData.of(new NowNewsReq(id));
        ResponseData<NowNewsRes> responseData = todayNewsAgentApi.getNowNewsList(requestData);
        if (responseData.getStatus() == ResponseStatus.SUCCESS) {
            NowNewsRes nowNewsRes = responseData.getBody();
            String newsId = nowNewsRes.getNewsId();
            List<NowNewsDto> data = nowNewsRes.getData();
            if (ObjectUtil.isNotNull(data)) {
                return data.stream().map(item -> {
                    NewsListRes newsListRes = new NewsListRes();
                    newsListRes.setIds(item.getId());
                    newsListRes.setTitle(item.getTitle());
                    newsListRes.setType(newsId);
                    newsListRes.setTypeN(NewsConstants.NEWS_TYPE.get(newsId));
                    newsListRes.setDetailUrl(item.getUrl());
                    // "extra" is optional per item; hot number and description live there.
                    NowNewsDto.Extra extra = item.getExtra();
                    if (ObjectUtil.isNotNull(extra)) {
                        newsListRes.setHotNum(extra.getHot());
                        newsListRes.setDesc(extra.getContent());
                    }
                    return newsListRes;
                }).collect(Collectors.toList());
            }
        }
        if (responseData.getStatus() == ResponseStatus.FAILD) {
            throw new AgentApiException(responseData.getHeader());
        }
        return Collections.emptyList();
    }

    /**
     * Reads one page of the cached "now news" list for a given channel.
     *
     * @param type     channel id appended to the cache-key prefix
     * @param pageNum  page index
     * @param pageSize page size
     * @return cached page; never null
     */
    @Override
    public List<NewsListRes> getCacheNowNewsList(String type, int pageNum, int pageSize) {
        List<NewsListRes> newsList = redisCache.lGetPage(NewsConstants.NOW_NEWS_LIST + type, pageNum, pageSize);
        if (newsList == null) {
            return Collections.emptyList();
        }
        logger.info("缓存中的新闻列表，数据：{}", newsList.size());
        return newsList;
    }

    /**
     * Asynchronously refreshes the "now news" cache: fetches every configured
     * channel, stores each channel's list, and additionally stores the top-5 of
     * each channel (merged and sorted by hot number) under the "all" key.
     */
    @Override
    @Async("asyncServiceExecutor")
    public void initNowNewsToCache() {
        List<NewsListRes> newsAllList = new ArrayList<>();
        NewsConstants.NEWS_TYPE.forEach((key, value) -> {
            List<NewsListRes> newsResList = this.getNowNewsList(key);
            if (newsResList != null && !newsResList.isEmpty()) {
                // Collect the top-ranked hot news of this channel for the "all" list.
                List<NewsListRes> hotRankNewsList = getHotNewsList(newsResList);
                newsAllList.addAll(hotRankNewsList);
                this.saveNewsListToCache(key, newsResList);
            }
        });
        this.saveNewsListToCache("all", sortNewsByHotNum(newsAllList));
    }

    /**
     * Sorts the list in place by numeric hot value, descending, and returns it.
     *
     * @param newsList list to sort (mutated: hotNumValue is populated on each item)
     * @return the same list, sorted
     */
    private List<NewsListRes> sortNewsByHotNum(List<NewsListRes> newsList) {
        // Materialize a comparable numeric value for each item first.
        for (NewsListRes item : newsList) {
            item.setHotNumValue(convertHotNum(item.getHotNum()));
        }
        newsList.sort(Comparator.comparingDouble(NewsListRes::getHotNumValue).reversed());
        return newsList;
    }

    /**
     * Converts a display hot-number string to a numeric value.
     * Handles formats like "23.6万热度", "230000", "45.8w", "123万".
     *
     * @param hotNum raw display string; null/blank yields 0
     * @return numeric hot value ("万"/"w" multiplies by 10000); 0 when unparseable
     */
    private static double convertHotNum(String hotNum) {
        if (hotNum == null || hotNum.trim().isEmpty()) {
            return 0;
        }
        Matcher matcher = HOT_NUM_PATTERN.matcher(hotNum);
        if (matcher.find()) {
            double number = Double.parseDouble(matcher.group(1));
            String unit = matcher.group(2);
            if (unit != null && (unit.equals("万") || unit.equalsIgnoreCase("w"))) {
                return number * 10000;
            }
            return number;
        }
        // Regex failed to match: strip non-numeric characters and try a plain parse.
        try {
            return Double.parseDouble(hotNum.replaceAll("[^\\d.]", ""));
        } catch (NumberFormatException e) {
            return 0;
        }
    }

    /**
     * Replaces the cached list for one channel: deletes the old Redis list (if any)
     * and writes the new one.
     *
     * @param key         channel id appended to the cache-key prefix
     * @param newsResList list to cache
     */
    private void saveNewsListToCache(String key, List<NewsListRes> newsResList) {
        String newCacheKey = NewsConstants.NOW_NEWS_LIST + key;
        // Delete-then-set so stale tail entries from a longer previous list cannot survive.
        if (redisCache.lGetListSize(newCacheKey) > 0) {
            redisCache.delete(newCacheKey);
        }
        redisCache.lSet(newCacheKey, newsResList);
    }

    /**
     * Returns the top entries of an already-ordered channel list (first 5 items).
     *
     * @param newsResList channel list, assumed ordered by the upstream agent
     * @return up to 5 leading items
     */
    private List<NewsListRes> getHotNewsList(List<NewsListRes> newsResList) {
        return newsResList.stream().limit(5).collect(Collectors.toList());
    }

    /**
     * Builds word-cloud data from all cached news: segments title + description
     * with HanLP, filters punctuation/stop words/single characters, counts word
     * frequencies, and returns the top 500 with computed font sizes.
     *
     * @return word-cloud entries, or an empty list when no data or on any error
     */
    @Override
    public List<WordCloudRes> getHotWordCloudForCache() {
        try {
            // 1. Collect all cached news across every channel.
            List<NewsListRes> newsAllList = this.getWordCloudData();
            if (CollectionUtil.isEmpty(newsAllList)) {
                return Collections.emptyList();
            }
            // 2. Segment with HanLP and count word frequencies.
            Map<String, Integer> keywordFrequencyMap = new HashMap<>();
            for (NewsListRes news : newsAllList) {
                String content = news.getTitle() + (StrUtil.isEmpty(news.getDesc()) ? "" : news.getDesc());
                List<Term> terms = HanLP.segment(content);
                List<String> meaningfulWords = terms.stream()
                        .filter(term -> !isPunctuation(term.nature.toString()))
                        .filter(term -> !isStopWord(term.word))
                        .filter(term -> isMeaningfulWord(term.nature.toString()))
                        .filter(term -> term.word.length() > 1)
                        .map(term -> term.word)
                        .collect(Collectors.toList());
                for (String word : meaningfulWords) {
                    keywordFrequencyMap.merge(word, 1, Integer::sum);
                }
            }
            // 3. Produce the word-cloud payload (top 500 by frequency).
            return generateWordCloudData(keywordFrequencyMap);
        } catch (Exception e) {
            // Word-cloud generation is best-effort: log and degrade to an empty result.
            logger.error("生成词云时发生异常", e);
            return Collections.emptyList();
        }
    }

    /**
     * Collects the cached "now news" lists of every configured channel into one list.
     *
     * @return merged list; never null
     */
    private List<NewsListRes> getWordCloudData() {
        List<NewsListRes> newsAllList = new ArrayList<>();
        NewsConstants.NEWS_TYPE.forEach((key, value) -> {
            List<NewsListRes> newsList = redisCache.lGet(NewsConstants.NOW_NEWS_LIST + key, 0, -1);
            if (newsList != null) {
                newsAllList.addAll(newsList);
            }
        });
        return newsAllList;
    }

    /*------------------------- helper methods -------------------------*/

    /**
     * True when the HanLP part-of-speech tag denotes punctuation ("w" prefix).
     */
    private boolean isPunctuation(String nature) {
        return nature.startsWith("w");
    }

    /**
     * True when the word is a common Chinese stop word.
     */
    private boolean isStopWord(String word) {
        return STOP_WORDS.contains(word);
    }

    /**
     * True for nouns, verbs and adjectives. The former explicit checks for
     * nr/ns/nt/nz were redundant: every one of those tags starts with "n".
     */
    private boolean isMeaningfulWord(String nature) {
        return nature.startsWith("n")   // nouns (incl. nr/ns/nt/nz)
                || nature.startsWith("v")   // verbs
                || nature.startsWith("a");  // adjectives
    }

    /**
     * Sorts words by frequency (descending) and maps the top 500 to word-cloud
     * entries with a frequency-scaled font size.
     *
     * @param freq word -> occurrence count
     * @return at most 500 entries, highest frequency first
     */
    private List<WordCloudRes> generateWordCloudData(Map<String, Integer> freq) {
        List<Map.Entry<String, Integer>> topEntries = freq.entrySet().stream()
                .sorted(Map.Entry.<String, Integer>comparingByValue().reversed())
                .limit(500)
                .collect(Collectors.toList());

        List<WordCloudRes> list = new ArrayList<>(topEntries.size());
        for (Map.Entry<String, Integer> entry : topEntries) {
            list.add(new WordCloudRes(
                    entry.getKey(),
                    entry.getValue(),
                    calculateFontSize(entry.getValue())
            ));
        }
        return list;
    }

    /**
     * Linearly maps a word frequency to a font size in [12, 40], capping the
     * frequency at 100 so a single dominant word cannot dwarf the rest.
     */
    private int calculateFontSize(int frequency) {
        int minSize = 12;
        int maxSize = 40;
        int maxFreq = 100; // frequencies above 100 are treated as 100

        return (int) ((double) Math.min(frequency, maxFreq) / maxFreq *
                (maxSize - minSize) + minSize);
    }

    /**
     * Experimental V2 word-cloud pipeline based on {@link NewsHotspotAnalyzer}.
     * Currently only logs diagnostics and always returns an empty list; diagnostics
     * now go through the logger instead of System.out.
     *
     * @return always an empty list (implementation not finished)
     */
    @Override
    public List<WordCloudRes> getHotWordCloudForCacheV2() {
        // 1. Build a fresh analyzer over all cached news.
        NewsHotspotAnalyzer analyzer = new NewsHotspotAnalyzer();
        List<NewsListRes> newsList = this.getWordCloudData();
        List<NewsHotspotAnalyzer.NewsItem> newsItems = newsList.stream()
                .map(news -> new NewsHotspotAnalyzer.NewsItem(news.getIds(), news.getTitle(), news.getDesc(), new Date()))
                .collect(Collectors.toList());
        analyzer.addNews(newsItems);

        logger.info("总新闻数量: {}", analyzer.getTotalNewsCount());
        logger.info("总词汇数量: {}", analyzer.getTotalVocabularySize());

        // 2. Top 20 hotspot words. Was getHotspotWords(1020) — an evident typo, since
        //    both the original comment and the "TOP20" output promise twenty entries.
        List<NewsHotspotAnalyzer.HotspotResult> hotspots = analyzer.getHotspotWords(20);
        logger.info("=== 热点词汇TOP20 ===");
        for (NewsHotspotAnalyzer.HotspotResult hotspot : hotspots) {
            logger.info("{}", hotspot);
        }

        // 3. Locate news through a hotspot word (hard-coded sample lookup).
        Optional<NewsHotspotAnalyzer.HotspotResult> techResult =
                analyzer.getHotspotResultByWord("美国");
        techResult.ifPresent(result -> {
            logger.info("词汇: {}, 出现次数: {}", result.getWord(), result.getFrequency());
            result.getRelatedNews().forEach(news -> logger.info("  - {}", news.getTitle()));
        });
        return Collections.emptyList();
    }
}
