package org.dromara.crawler.mongo.mongoservice.impl;

import cn.hutool.core.util.ObjectUtil;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.mongodb.BasicDBObject;
import com.mongodb.client.AggregateIterable;
import com.mongodb.client.MongoCollection;
import com.mongoplus.aggregate.Aggregate;
import com.mongoplus.aggregate.AggregateWrapper;
import com.mongoplus.aggregate.pipeline.Accumulators;
import com.mongoplus.conditions.interfaces.ConditionOperators;
import com.mongoplus.conditions.interfaces.Projection;
import com.mongoplus.conditions.query.LambdaQueryChainWrapper;
import com.mongoplus.mapping.TypeReference;
import com.mongoplus.service.impl.ServiceImpl;
import lombok.RequiredArgsConstructor;
import org.bson.Document;
import org.dromara.common.mybatis.core.page.PageQuery;
import org.dromara.crawler.domain.bo.DataCollectionBo;
import org.dromara.crawler.domain.vo.DataSource;
import org.dromara.crawler.mongo.mongodomain.*;
import org.dromara.crawler.mongo.mongoservice.*;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.stereotype.Service;
import org.apache.commons.lang3.StringUtils;
import java.time.LocalDate;
import java.time.YearMonth;
import java.time.ZoneId;
import java.util.*;
import java.util.stream.Collectors;

@Service
@RequiredArgsConstructor
public class XinHuaNetServiceImpl extends ServiceImpl<XinHuaNet> implements XinHuaNetService {

    /** Mongo collection names referenced by the cross-source aggregations. */
    private static final String COLLECTION_XINHUA = "XinHua_Net";
    private static final String COLLECTION_CNN = "CNN";
    private static final String COLLECTION_WIKI = "Wiki";

    private final CNNService cnnService;
    private final WikiService wikiService;
    private final MongoTemplate mongoTemplate;
    private final HotspotEventService hotspotEventService;

    /**
     * Queries XinHuaNet documents matching both the given label and keyword (exact match).
     *
     * @param label   label to match exactly
     * @param keyword keyword to match exactly
     * @return matching documents, empty list when none
     */
    @Override
    public List<XinHuaNet> queryByLabelAndKeyword(String label, String keyword) {
        LambdaQueryChainWrapper<XinHuaNet> queryChainWrapper = this.lambdaQuery();
        queryChainWrapper.eq(XinHuaNet::getLabel, label)
            .eq(XinHuaNet::getKeyword, keyword);
        return this.list(queryChainWrapper);
    }

    /** Returns every XinHuaNet document in the collection. */
    @Override
    public List<XinHuaNet> getAll() {
        return this.list();
    }

    /** Queries XinHuaNet documents carrying the given label (exact match). */
    @Override
    public List<XinHuaNet> queryXinHuaByLabel(String label) {
        return this.lambdaQuery().eq(XinHuaNet::getLabel, label).list();
    }

    /**
     * Paged query across the XinHuaNet, CNN and Wiki collections.
     * <p>
     * When {@code dataCollectionBo.source} is empty the three collections are combined
     * via {@code $unionWith}; otherwise only the selected source ("1" = XinHuaNet,
     * "2" = CNN, "3" = Wiki) is queried. Each per-collection pipeline filters, tags
     * rows with a {@code source_type} discriminator and projects a uniform column set.
     *
     * @param dataCollectionBo search criteria (keyword, time range, title, statuses, source)
     * @param pageQuery        paging parameters; defaults to page 1, size 10 when absent
     * @return a page of {@link DataSource} rows with the total count populated
     */
    @Override
    public Page<DataSource> queryByPage(DataCollectionBo dataCollectionBo, PageQuery pageQuery) {
        if (pageQuery == null) {
            pageQuery = new PageQuery();
        }
        // Apply paging defaults: first page, 10 rows per page.
        if (pageQuery.getPageNum() == null || pageQuery.getPageNum() <= 0) {
            pageQuery.setPageNum(1);
        }
        if (pageQuery.getPageSize() == null || pageQuery.getPageSize() <= 0) {
            pageQuery.setPageSize(10);
        }
        Page<DataSource> page = pageQuery.build();
        List<Map<String, Object>> countResult = null;
        List<DataSource> aggregateList = null;

        AggregateWrapper aggregateWrapper = new AggregateWrapper()
            .match(createWrapper(dataCollectionBo))
            .addFields(new BasicDBObject("source_type", "1"))
            .project(Projection.builder()
                .display("_id", "title", "source", "time", "keyword", "crawl_time", "status1", "status2", "source_type")
                .buildList());

        AggregateWrapper cnnMatch = new AggregateWrapper()
            .match(cnnService.createWrapper(dataCollectionBo))
            .addFields(new BasicDBObject("source_type", "2"))
            .project(Projection.builder()
                .display("_id", "title", "source", "time", "keyword", "crawl_time", "status1", "status2", "source_type")
                .buildList());

        AggregateWrapper wikiMatch = new AggregateWrapper()
            .match(wikiService.createWrapper(dataCollectionBo))
            .addFields(new BasicDBObject("source_type", "3"))
            .project(Projection.builder()
                .display("_id", "title", "source", "time", "keyword", "crawl_time", "status1", "status2", "source_type")
                .buildList());

        if (ObjectUtil.isEmpty(dataCollectionBo.getSource())) {
            // No source filter: count and page over the union of all three collections.
            AggregateWrapper countWrapper = new AggregateWrapper()
                .match(createWrapper(dataCollectionBo))
                .unionWith(COLLECTION_CNN, cnnMatch)
                .unionWith(COLLECTION_WIKI, wikiMatch)
                .count("total");
            countResult = execute(COLLECTION_XINHUA, countWrapper);

            aggregateWrapper.unionWith(COLLECTION_CNN, cnnMatch).unionWith(COLLECTION_WIKI, wikiMatch);
            applyPaging(aggregateWrapper, pageQuery);
            aggregateList = execute(COLLECTION_XINHUA, aggregateWrapper, DataSource.class);
        } else {
            switch (dataCollectionBo.getSource()) {
                case "1" -> {
                    AggregateWrapper countWrapper = new AggregateWrapper()
                        .match(createWrapper(dataCollectionBo))
                        .count("total");
                    countResult = execute(COLLECTION_XINHUA, countWrapper);
                    applyPaging(aggregateWrapper, pageQuery);
                    aggregateList = execute(COLLECTION_XINHUA, aggregateWrapper, DataSource.class);
                }
                case "2" -> {
                    AggregateWrapper countWrapper = new AggregateWrapper()
                        .match(cnnService.createWrapper(dataCollectionBo))
                        .count("total");
                    countResult = execute(COLLECTION_CNN, countWrapper);
                    applyPaging(cnnMatch, pageQuery);
                    aggregateList = execute(COLLECTION_CNN, cnnMatch, DataSource.class);
                }
                case "3" -> {
                    AggregateWrapper countWrapper = new AggregateWrapper()
                        .match(wikiService.createWrapper(dataCollectionBo))
                        .count("total");
                    countResult = execute(COLLECTION_WIKI, countWrapper);
                    applyPaging(wikiMatch, pageQuery);
                    aggregateList = execute(COLLECTION_WIKI, wikiMatch, DataSource.class);
                }
            }
        }

        long totalCount = extractTotal(countResult);
        if (aggregateList == null) {
            // Unknown source value: return an empty page rather than null records.
            aggregateList = Collections.emptyList();
        }
        // Mirror the Mongo _id into the id field expected by callers.
        aggregateList.forEach(item -> item.setId(item.get_id()));

        page.setRecords(aggregateList);
        page.setTotal(totalCount);
        return page;
    }

    /** Appends skip/limit stages derived from the (1-based) page query. */
    private static void applyPaging(AggregateWrapper wrapper, PageQuery pageQuery) {
        wrapper.skip((pageQuery.getPageNum() - 1) * pageQuery.getPageSize())
            .limit(pageQuery.getPageSize());
    }

    /**
     * Extracts the value produced by a {@code $count: "total"} stage.
     * Mongo emits no document when the pipeline matched nothing, so an
     * empty result (or a missing key) maps to 0.
     */
    private static long extractTotal(List<Map<String, Object>> countResult) {
        if (countResult == null || countResult.isEmpty()) {
            return 0L;
        }
        Object total = countResult.get(0).get("total");
        return total == null ? 0L : Long.parseLong(total.toString());
    }

    /** Looks up a single XinHuaNet document by its id. */
    @Override
    public XinHuaNet queryById(String id) {
        return this.getById(id);
    }

    /** Runs an aggregation returning raw key/value documents. */
    private List<Map<String, Object>> execute(String collectionName, Aggregate<?> aggregate) {
        return baseMapper.aggregateList(collectionName, aggregate, new TypeReference<Map<String, Object>>() {
        });
    }

    /** Runs an aggregation mapping each result document onto {@code clazz}. */
    private <T> List<T> execute(String collectionName, Aggregate<?> aggregate, Class<T> clazz) {
        return baseMapper.aggregateList(collectionName, aggregate, clazz);
    }

    /**
     * Builds the XinHuaNet filter from the optional search criteria: fuzzy matches
     * on first-level event, keyword, title and statuses, a closed time range, and
     * a descending sort on crawl time. Conditions are only applied when the
     * corresponding criterion is present.
     */
    private LambdaQueryChainWrapper<XinHuaNet> createWrapper(DataCollectionBo dataCollectionBo) {
        LambdaQueryChainWrapper<XinHuaNet> wrapper = this.lambdaQuery();
        wrapper.like(ObjectUtil.isNotEmpty(dataCollectionBo.getEventFirstLevel()), XinHuaNet::getEventFirstLevel, dataCollectionBo.getEventFirstLevel());
        wrapper.like(ObjectUtil.isNotEmpty(dataCollectionBo.getKeyword()), XinHuaNet::getKeyword, dataCollectionBo.getKeyword());
        wrapper.gte(ObjectUtil.isNotEmpty(dataCollectionBo.getStartTime()), XinHuaNet::getTime, dataCollectionBo.getStartTime());
        wrapper.lte(ObjectUtil.isNotEmpty(dataCollectionBo.getEndTime()), XinHuaNet::getTime, dataCollectionBo.getEndTime());
        wrapper.like(ObjectUtil.isNotEmpty(dataCollectionBo.getTitle()), XinHuaNet::getTitle, dataCollectionBo.getTitle());
        wrapper.like(ObjectUtil.isNotEmpty(dataCollectionBo.getStatus1()), XinHuaNet::getStatus1, dataCollectionBo.getStatus1());
        wrapper.like(ObjectUtil.isNotEmpty(dataCollectionBo.getStatus2()), XinHuaNet::getStatus2, dataCollectionBo.getStatus2());
        wrapper.orderByDesc(XinHuaNet::getCrawlTime);
        return wrapper;
    }

    // Knowledge-base statistics.

    /** Counts all knowledge-target documents. */
    public Long countAll(KnowledgeTargetService knowledgeTargetService) {
        return knowledgeTargetService.count();
    }

    /** Counts all knowledge-event documents. */
    public Long countAll(KnowledgeEventService knowledgeEventService) {
        return knowledgeEventService.count();
    }

    /** Counts all hotspot-event documents. */
    @Override
    public Long getHotspotEventCount() {
        return hotspotEventService.count();
    }

    /** Total document count across the XinHuaNet, CNN and Wiki collections. */
    @Override
    public Long getTotalByCNN_XinHuaNet_Wiki() {
        return this.count() + cnnService.count() + wikiService.count();
    }

    /**
     * Combined statistics: knowledge base (targets + events), site data
     * (XinHuaNet + CNN + Wiki) and hotspot events, optionally filtered by a
     * fuzzy keyword match on each collection's name/keyword field.
     *
     * @param keyword optional filter; null/empty means "count everything"
     * @return map with keys {@code knowledgeBaseCount}, {@code siteDataCount},
     *         {@code hotspotEventCount}
     */
    @Override
    public Map<String, Long> getCombinedStats(KnowledgeTargetService knowledgeTargetService,
                                              KnowledgeEventService knowledgeEventService, String keyword) {
        Map<String, Long> stats = new HashMap<>();
        boolean hasKeyword = keyword != null && !keyword.isEmpty();

        // Knowledge-target count.
        Long originalKnowledgeCount;
        if (hasKeyword) {
            originalKnowledgeCount = knowledgeTargetService.lambdaQuery()
                .like(KnowledgeTarget::getName, keyword)
                .count();
        } else {
            originalKnowledgeCount = knowledgeTargetService.count();
        }

        // Knowledge-event count.
        Long eventKnowledgeCount;
        if (hasKeyword) {
            eventKnowledgeCount = knowledgeEventService.lambdaQuery()
                .like(KnowledgeEvent::getEventName, keyword)
                .count();
        } else {
            eventKnowledgeCount = knowledgeEventService.count();
        }

        stats.put("knowledgeBaseCount", originalKnowledgeCount + eventKnowledgeCount);

        // Site data counts across the three crawled collections.
        if (hasKeyword) {
            Long xinhuaCount = this.lambdaQuery()
                .like(XinHuaNet::getKeyword, keyword)
                .count();

            Long cnnCount = cnnService.lambdaQuery()
                .like(CNN::getKeyword, keyword)
                .count();

            Long wikiCount = wikiService.lambdaQuery()
                .like(Wiki::getKeyword, keyword)
                .count();

            stats.put("siteDataCount", xinhuaCount + cnnCount + wikiCount);
        } else {
            stats.put("siteDataCount", this.getTotalByCNN_XinHuaNet_Wiki());
        }

        // Hotspot-event count.
        Long hotspotEventCount;
        if (hasKeyword) {
            hotspotEventCount = hotspotEventService.lambdaQuery()
                .like(HotspotEvent::getKeywords, keyword)
                .count();
        } else {
            hotspotEventCount = this.getHotspotEventCount();
        }
        stats.put("hotspotEventCount", hotspotEventCount);

        return stats;
    }

    /** Overload of {@link #getCombinedStats} without a keyword filter. */
    @Override
    public Map<String, Long> getCombinedStats(KnowledgeTargetService knowledgeTargetService,
                                              KnowledgeEventService knowledgeEventService) {
        return getCombinedStats(knowledgeTargetService, knowledgeEventService, null);
    }

    /**
     * Monthly counts for site data and knowledge-base data combined into one map,
     * keyed by month label (e.g. "3月").
     */
    @Override
    public Map<String, Long> getMonthlyStats() {
        Map<String, Long> result = new LinkedHashMap<>();

        // Site-data monthly statistics.
        Map<String, Long> siteStats = getSiteMonthlyStats();
        result.putAll(siteStats);

        // Knowledge-base monthly statistics.
        Map<String, Long> knowledgeStats = getKnowledgeMonthlyStats();
        result.putAll(knowledgeStats);

        return result;
    }

    /** Overload of {@link #getMonthStatistics(String)} without a keyword filter. */
    @Override
    public Object getMonthStatistics() {
        return getMonthStatistics(null);
    }

    /**
     * Per-month document counts over the trailing 12 calendar months, combining
     * the three site collections via {@code $unionWith} and grouping by
     * "%Y-%m". Also includes hotspot-event monthly counts.
     *
     * @param keyword optional fuzzy filter on each collection's keyword field
     * @return map with keys {@code mainData} and {@code hotspotData}, each a list
     *         of {@link MonthCountVo} covering all 12 months (0 where no data)
     */
    @Override
    public Object getMonthStatistics(String keyword) {
        // The 12 month labels we must always report, oldest first, e.g. "2024-08".
        List<String> yearMonthList = new ArrayList<>();
        YearMonth thisMonth = YearMonth.now();
        for (int i = 11; i >= 0; i--) {
            yearMonthList.add(thisMonth.minusMonths(i).toString());
        }
        LocalDate today = LocalDate.now();
        LocalDate monthsAgo = YearMonth.from(today.minusMonths(11)).atDay(1);
        Date startDate = Date.from(monthsAgo.atStartOfDay(ZoneId.systemDefault()).toInstant());
        Date endDate = Date.from(today.withDayOfMonth(today.lengthOfMonth()).atTime(23, 59, 59).atZone(ZoneId.systemDefault()).toInstant());

        // XinHuaNet filter (applied after the union — see note below).
        AggregateWrapper xinHuaAggregate = new AggregateWrapper();
        LambdaQueryChainWrapper<XinHuaNet> wrapper = this.lambdaQuery();
        wrapper.lt(XinHuaNet::getTime, endDate);
        wrapper.gte(XinHuaNet::getTime, startDate);
        if (StringUtils.isNotBlank(keyword)) {
            wrapper.like(XinHuaNet::getKeyword, keyword);
        }
        // CNN sub-pipeline.
        AggregateWrapper cnnAggregate = new AggregateWrapper();
        LambdaQueryChainWrapper<CNN> cnnWrapper = cnnService.lambdaQuery();
        cnnWrapper.lt(CNN::getTime, endDate);
        cnnWrapper.gte(CNN::getTime, startDate);
        if (StringUtils.isNotBlank(keyword)) {
            cnnWrapper.like(CNN::getKeyword, keyword);
        }
        cnnAggregate.match(cnnWrapper);
        // Wiki sub-pipeline.
        AggregateWrapper wikiAggregate = new AggregateWrapper();
        LambdaQueryChainWrapper<Wiki> wikiWrapper = wikiService.lambdaQuery();
        wikiWrapper.lt(Wiki::getTime, endDate);
        wikiWrapper.gte(Wiki::getTime, startDate);
        if (StringUtils.isNotBlank(keyword)) {
            wikiWrapper.like(Wiki::getKeyword, keyword);
        }
        wikiAggregate.match(wikiWrapper);

        // NOTE(review): $unionWith is added before $match, so the outer match also
        // filters the unioned CNN/Wiki documents by time/keyword — confirm intended.
        xinHuaAggregate.unionWith(COLLECTION_CNN, cnnAggregate).unionWith(COLLECTION_WIKI, wikiAggregate);
        xinHuaAggregate.match(wrapper).group(ConditionOperators.dateToString("%Y-%m", "$time"), Accumulators.sum());
        xinHuaAggregate.sortAsc("_id");
        List<Document> documentList = execute(COLLECTION_XINHUA, xinHuaAggregate, Document.class);

        // Convert the grouped result into Map<year-month, count>; the accumulator
        // field name may be "count" or "sum" depending on the wrapper version.
        Map<String, Integer> mainCountMap = documentList.stream()
            .collect(Collectors.toMap(
                d -> d.getString("_id"),
                d -> d.getInteger("count", d.getInteger("sum", 0))
            ));

        // Hotspot-event monthly counts.
        Map<String, Integer> hotspotCountMap = hotspotEventService.getMonthStatistics(keyword);

        // Emit one entry per month label, defaulting to 0 where no data exists.
        Map<String, Object> result = new HashMap<>();
        result.put("mainData", yearMonthList.stream()
            .map(ym -> new MonthCountVo(ym, mainCountMap.getOrDefault(ym, 0)))
            .collect(Collectors.toList()));
        result.put("hotspotData", yearMonthList.stream()
            .map(ym -> new MonthCountVo(ym, hotspotCountMap.getOrDefault(ym, 0)))
            .collect(Collectors.toList()));
        return result;
    }

    /** Monthly counts summed over the three site collections. */
    private Map<String, Long> getSiteMonthlyStats() {
        Map<String, Long> stats = new LinkedHashMap<>();

        addCollectionMonthlyStats(stats, COLLECTION_XINHUA);
        addCollectionMonthlyStats(stats, COLLECTION_CNN);
        addCollectionMonthlyStats(stats, COLLECTION_WIKI);

        return stats;
    }

    /** Monthly counts summed over the two knowledge-base collections. */
    private Map<String, Long> getKnowledgeMonthlyStats() {
        Map<String, Long> stats = new LinkedHashMap<>();

        addCollectionMonthlyStats(stats, "knowledge_target");
        addCollectionMonthlyStats(stats, "knowledge_event");

        return stats;
    }

    /**
     * Accumulates per-month document counts for one collection into {@code stats},
     * keyed by "N月". Only documents from the last six months are considered.
     * <p>
     * NOTE(review): grouping uses {@code $month} (month-of-year only), so the same
     * month from different years would land in one bucket — acceptable for a
     * six-month window but worth confirming.
     */
    private void addCollectionMonthlyStats(Map<String, Long> stats, String collectionName) {
        List<org.bson.conversions.Bson> pipeline = new ArrayList<>();

        // Lower bound: six months back from today, at local midnight.
        Date sixMonthsAgo = Date.from(LocalDate.now().minusMonths(6)
            .atStartOfDay(ZoneId.systemDefault()).toInstant());

        // Stage 1: keep only documents from the last six months.
        pipeline.add(
            new Document("$match",
                new Document("time",
                    new Document("$gte", sixMonthsAgo)
                )
            )
        );

        // Stage 2: extract the month-of-year from the time field.
        pipeline.add(
            new Document("$project",
                new Document("month",
                    new Document("$month", "$time")
                )
            )
        );

        // Stage 3: count documents per month.
        pipeline.add(
            new Document("$group",
                new Document("_id", "$month")
                    .append("count", new Document("$sum", 1))
            )
        );

        MongoCollection<Document> collection = mongoTemplate.getCollection(collectionName);
        AggregateIterable<Document> aggregate = collection.aggregate(pipeline);

        for (Document doc : aggregate) {
            Integer month = doc.getInteger("_id");
            // $sum: 1 yields an Int32, so Document.getLong would throw
            // ClassCastException; read through Number instead.
            Number count = doc.get("count", Number.class);
            if (month != null && count != null) {
                String monthKey = month + "月";
                stats.put(monthKey, stats.getOrDefault(monthKey, 0L) + count.longValue());
            }
        }
    }

    /** Simple month/count pair used in the monthly-statistics payload. */
    public static class MonthCountVo {
        private String month; // e.g. "2025-01"
        private Integer count;

        public MonthCountVo(String month, Integer count) {
            this.month = month;
            this.count = count;
        }

        public String getMonth() {
            return month;
        }

        public void setMonth(String month) {
            this.month = month;
        }

        public Integer getCount() {
            return count;
        }

        public void setCount(Integer count) {
            this.count = count;
        }
    }

}


