package com.shuangseqiu.service.impl;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.shuangseqiu.service.Cz89AnalysisService;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

/**
 * cz89 website analysis service implementation.
 *
 * <p>Crawls the paginated prediction-article list for the double-color-ball
 * (ssq) lottery on m.cz89.com, parses each article for recommended red/blue
 * ball numbers, aggregates how often each number is recommended across all
 * articles, and persists the aggregated result as a JSON file.</p>
 */
@Service
public class Cz89AnalysisServiceImpl extends baseServiceImpl implements Cz89AnalysisService {

    private static final Logger logger = LoggerFactory.getLogger(Cz89AnalysisServiceImpl.class);

    /** First page of the ssq prediction-article list on the mobile site. */
    private static final String LIST_URL = "https://m.cz89.com/ssq/item_8.htm";

    /**
     * Safety cap on pagination. The crawl normally stops at the first page
     * that yields no article links; this cap only guards against a runaway
     * loop if the site keeps returning links.
     */
    private static final int MAX_PAGES = 999;

    @Autowired
    private HttpService httpService;

    @Autowired
    private ObjectMapper objectMapper;

    // Directory where analysis results are written as JSON files.
    @Value("${prediction.data.path:./data}")
    private String dataPath;

    /**
     * Analyzes all cz89 prediction articles for the given draw period.
     *
     * <p>Walks the article list page by page, parses every matching article,
     * tallies recommended red/blue numbers, sorts them by recommendation
     * count, and saves the result to disk.</p>
     *
     * @param period the draw period to analyze (e.g. "2024001")
     * @return a result map containing (among others) {@code totalArticles},
     *         {@code sortedRed}/{@code sortedRedStr},
     *         {@code sortedBlue}/{@code sortedBlueStr} and {@code success};
     *         on failure {@code success=false} and {@code error} are set
     */
    @Override
    @SuppressWarnings("unchecked") // values read back from the result maps are known List types
    public Map<String, Object> analyzeByPeriod(String period) {
        logger.info("开始分析cz89网站，预测期数: {}", period);

        Map<String, Object> result = new HashMap<>();
        result.put("period", period);
        // java.util.Date kept deliberately: the serialized JSON shape must not change.
        result.put("analysisTime", new Date());

        try {
            int pageIndex = 1;
            // BUGFIX: the original overwrote "totalArticles" with the per-page
            // count inside the loop (before the empty-page break), so the stored
            // value was almost always 0. Accumulate instead and store once.
            int totalArticles = 0;

            // Parse the first list page to collect prediction article links.
            Map<String, Object> mainPageResult = parseCz89Page(LIST_URL, period);

            List<Map<String, Object>> articles = new ArrayList<>();
            Map<String, Integer> numberCountMapR = new ConcurrentHashMap<>();
            Map<String, List<String>> numberToArticlesMapR = new ConcurrentHashMap<>();
            Map<String, Integer> numberCountMapB = new ConcurrentHashMap<>();
            Map<String, List<String>> numberToArticlesMapB = new ConcurrentHashMap<>();

            // Walk the paginated list until a page yields no article links.
            while (pageIndex <= MAX_PAGES) {
                List<String> articleLinks =
                        (List<String>) mainPageResult.getOrDefault("articleLinks", new ArrayList<>());
                logger.info("第 {} 页 找到 {} 篇相关预测文章", pageIndex, articleLinks.size());
                if (articleLinks.isEmpty()) {
                    break;
                }
                totalArticles += articleLinks.size();

                for (String link : articleLinks) {
                    try {
                        logger.info("解析文章: {}", link);
                        Map<String, Object> articleResult = parseArticleContent(link, period);
                        articles.add(articleResult);

                        List<String> redBalls =
                                (List<String>) articleResult.getOrDefault("redBalls", new ArrayList<>());
                        List<String> blueBalls =
                                (List<String>) articleResult.getOrDefault("blueBalls", new ArrayList<>());

                        // String.valueOf avoids an NPE when the article fetch
                        // failed before a title could be stored.
                        String articleTitle = String.valueOf(articleResult.get("title"));
                        updateNumberStats(redBalls, "red", articleTitle, numberCountMapR, numberToArticlesMapR);
                        updateNumberStats(blueBalls, "blue", articleTitle, numberCountMapB, numberToArticlesMapB);
                    } catch (Exception e) {
                        // One bad article must not abort the whole crawl.
                        logger.error("解析文章失败: {}", link, e);
                    }
                }

                // Fetch the next list page.
                pageIndex++;
                mainPageResult = parseCz89Page(LIST_URL + "?p=" + pageIndex, period);
            }
            result.put("totalArticles", totalArticles);

            // Sort numbers by how many articles recommended them and also
            // expose each ordering as a ';'-joined string.
            List<Map<String, Object>> sortedNumbersR = sortNumbersByCount(numberCountMapR, false);
            result.put("sortedRed", sortedNumbersR);
            result.put("sortedRedStr", joinKeys(sortedNumbersR));

            List<Map<String, Object>> sortedNumbersB = sortNumbersByCount(numberCountMapB, true);
            result.put("sortedBlue", sortedNumbersB);
            result.put("sortedBlueStr", joinKeys(sortedNumbersB));

            // Persist the aggregated result.
            saveResultToJson(result, period);

            logger.info("cz89网站分析完成，预测期数: {}", period);
            result.put("success", true);

        } catch (Exception e) {
            logger.error("分析cz89网站失败", e);
            result.put("error", e.getMessage());
            result.put("success", false);
        }

        return result;
    }

    /** Joins each sorted-number entry's "key" value with ';'. */
    private static String joinKeys(List<Map<String, Object>> sortedNumbers) {
        return sortedNumbers.stream()
                .map(number -> String.valueOf(number.get("key")))
                .collect(Collectors.joining(";"));
    }

    /**
     * Parses one list page of cz89 and collects links to prediction articles.
     *
     * @param url    absolute URL of the list page to parse
     * @param period draw period used to filter relevant article links
     * @return a map with {@code articleLinks} (possibly empty) and {@code url};
     *         on failure only {@code error} is guaranteed
     */
    @Override
    public Map<String, Object> parseCz89Page(String url, String period) {
        Map<String, Object> result = new HashMap<>();
        List<String> articleLinks = new ArrayList<>();

        try {
            String pageContent = httpService.getPageContent(url);
            Document document = Jsoup.parse(pageContent);

            // Article links live inside <ul class="commenList"> on the mobile list page.
            Element commenList = document.select("ul.commenList").first();
            if (commenList != null) {
                for (Element link : commenList.select("a")) {
                    String href = link.attr("href");
                    String text = link.text();
                    if (href == null || href.isEmpty()) {
                        continue;
                    }
                    // Relative links need the site prefix to become absolute.
                    String fullUrl = href.startsWith("http") ? href : "https://m.cz89.com" + href;
                    // Keep only links that look like prediction articles for this period.
                    if (isPredictionArticleLink(href, text, period)) {
                        articleLinks.add(fullUrl);
                    }
                }
            }

            result.put("articleLinks", articleLinks);
            result.put("url", url);

        } catch (Exception e) {
            logger.error("解析cz89页面失败: {}", url, e);
            result.put("error", e.getMessage());
        }

        return result;
    }

    /**
     * Serializes the analysis result as pretty-printed JSON under {@code dataPath}.
     *
     * @param result the analysis result to persist
     * @param period draw period, used to build the file name
     * @throws RuntimeException if the directory or file cannot be written
     */
    public void saveResultToJson(Map<String, Object> result, String period) {
        try {
            // createDirectories is a no-op for existing directories, so no
            // existence pre-check is needed.
            Files.createDirectories(Paths.get(dataPath));

            // NOTE(review): the file name repeats the period and says "niucai"
            // even though this is the cz89 service — kept byte-for-byte for
            // compatibility, but it looks like a copy/paste slip; confirm the
            // intended naming scheme.
            String fileName = period + "_niucai_" + period;
            String filePath = Paths.get(dataPath, fileName + ".json").toString();

            objectMapper.writerWithDefaultPrettyPrinter().writeValue(new File(filePath), result);

            logger.info("结果已保存到文件: {}", filePath);

        } catch (IOException e) {
            logger.error("保存结果到JSON文件失败", e);
            throw new RuntimeException("保存结果到JSON文件失败", e);
        }
    }

    /**
     * Parses a single prediction article and extracts recommended numbers.
     *
     * <p>Depending on the article title, extraction is delegated to the
     * generic extractor, the "定蓝" (blue-only) extractor, the recommendation
     * extractor (red-only), or a paragraph-by-paragraph scan for summary
     * ("推荐总汇") articles.</p>
     *
     * @param url    absolute article URL
     * @param period draw period passed through to the extractors
     * @return a map with {@code url}, {@code title} and, when found,
     *         {@code redBalls}/{@code blueBalls}; {@code error} on failure
     */
    private Map<String, Object> parseArticleContent(String url, String period) {
        Map<String, Object> result = new HashMap<>();
        result.put("url", url);

        try {
            // Normalize <br>/<span> markup into <p> so each recommendation
            // line becomes its own paragraph for the scan below.
            String pageContent = httpService.getPageContent(url)
                    .replace("<br>", "</p><p>")
                    .replace("<br/>", "</p><p>")
                    .replace("<span", "<p")
                    .replace("span>", "p>");
            Document document = Jsoup.parse(pageContent);

            String title = document.title();
            result.put("title", title + "|" + url);

            // Machine-number and history articles are not predictions — skip them.
            if (title.contains("开机号分析") || title.contains("历史")) {
                return result;
            }

            Element element = document.select("article.article").first();
            // Generic extraction handles most article layouts; if it matched,
            // the result map has already been populated.
            if (extractTongYongNumbers(element, result, period)) {
                return result;
            }
            // "定蓝" articles recommend blue balls only.
            if (title.contains("定蓝")) {
                result.put("blueBalls", extractTuiJianNumbers(element, period));
                return result;
            }
            // Anything that is not a recommendation summary is treated as red-ball picks.
            if (!title.contains("推荐总汇") && !title.contains("推荐汇")) {
                result.put("redBalls", extractTuiJianNumbers(element, period));
                return result;
            }

            // Summary articles: scan every paragraph of the article body.
            for (Element paragraph : document.select("article.article p")) {
                String bodyText = paragraph.text();
                if (bodyText == null || bodyText.isEmpty()) {
                    continue;
                }

                logger.info("检索文章文字：{}", bodyText);

                // BUGFIX: only store a paragraph's extraction when it actually
                // yielded numbers — previously a later empty paragraph clobbered
                // earlier valid matches.
                List<String> redBalls = extractRedBalls(bodyText);
                if (redBalls != null && !redBalls.isEmpty()) {
                    result.put("redBalls", redBalls);
                }
                List<String> blueBalls = extractBlueBalls(bodyText);
                if (blueBalls != null && !blueBalls.isEmpty()) {
                    result.put("blueBalls", blueBalls);
                }
            }

        } catch (Exception e) {
            logger.error("解析文章内容失败: {}", url, e);
            result.put("error", e.getMessage());
        }

        return result;
    }

}