package org.chen.service;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.chen.config.CrawlerConfig;
import org.chen.factory.SpiderFactory;
import org.chen.handler.CrawlerWebSocketHandler;
import org.chen.model.dto.ActiveTasksDto;
import org.chen.model.dto.CrawlerTaskDTO;
import org.chen.model.elastic.CrawlerTask;
import org.chen.model.vo.CrawlerRuleVO;
import org.chen.model.vo.CrawlerTaskVO;
import org.chen.pipeline.*;
import org.chen.processor.*;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.data.domain.*;
import org.springframework.data.mongodb.core.FindAndModifyOptions;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.pipeline.Pipeline;
import us.codecraft.webmagic.processor.PageProcessor;

import javax.annotation.PreDestroy;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

@Service
@Slf4j
public class CrawlerService {

    private final MongoTemplate mongoTemplate;
    private final SpiderFactory spiderFactory;
    private final ApplicationContext context;
    private final CrawlerWebSocketHandler webSocketHandler;
    // Active crawl contexts keyed by task id; entries are removed on stop.
    private final Map<String, CrawlerContext> activeSpiders = new ConcurrentHashMap<>();
    // Pool that hosts the blocking spider runs (at most 5 concurrent crawls).
    private final ExecutorService executorService = Executors.newFixedThreadPool(5);

    public CrawlerService(
                          MongoTemplate mongoTemplate,
                          ApplicationContext context,
                          CrawlerWebSocketHandler webSocketHandler,
                          SpiderFactory spiderFactory) {
        this.mongoTemplate = mongoTemplate;
        this.spiderFactory = spiderFactory;
        this.context = context;
        this.webSocketHandler = webSocketHandler;
    }

    /** Runtime state of one active crawl: the spider, its processor, and its config. */
    @Data
    @AllArgsConstructor
    private static class CrawlerContext {
        private Spider spider;
        private AbstractESPNPageProcessor processor;
        private CrawlerConfig config; // nested class; shadows org.chen.config.CrawlerConfig
    }

    /**
     * Per-task crawl options. NOTE: field order matters for the Lombok-generated
     * all-args constructor — autoPublish first, crawlImages second.
     */
    @Data
    @AllArgsConstructor
    private static class CrawlerConfig {
        private Boolean autoPublish;
        private Boolean crawlImages;
    }

    /**
     * Maps a spider/rule name to its ESPN start URL.
     *
     * @param spiderName either the Spring bean name or the short rule id
     * @throws IllegalArgumentException for an unknown name
     */
    private String getStartUrl(String spiderName) {
        return switch (spiderName) {
            case "espnSoccerSpider", "soccer" -> "https://www.espn.com/soccer/";
            case "espnBasketballSpider", "basketball" -> "https://www.espn.com/basketball/";
            case "espnRugbySpider", "rugby" -> "https://www.espn.com/rugby/";
            case "espnBettingSpider", "sports-betting" -> "https://www.espn.com/sports-betting/";
            case "espnBasketballNBASpider", "nba" -> "https://www.espn.com/nba/";
            case "espnSoccerTransfersSpider", "transfers" -> "https://www.espn.com/soccer/transfers-news-and-features/";
            default -> throw new IllegalArgumentException("Unknown spider: " + spiderName);
        };
    }

    /**
     * Creates a crawler task from the DTO, persists it, and starts the crawl.
     *
     * @return the id of the newly created task
     */
    @Transactional(rollbackFor = Exception.class)
    public String createTask(CrawlerTaskDTO dto) {
        CrawlerTask task = new CrawlerTask();
        BeanUtils.copyProperties(dto, task);
        task.setStatus("INIT");
        task.setRuleDescription(dto.getRuleDescription());
        task.setPageCount(0);
        task.setCreateTime(new Date());
        task.setUpdateTime(new Date());

        // BUG FIX: constructor is (autoPublish, crawlImages) — the arguments were
        // previously passed in the opposite order, inverting both flags.
        CrawlerConfig config = new CrawlerConfig(
                dto.getAutoPublish(),
                dto.getCrawlImages()
        );
        log.info("Creating task with config - crawlImages: {}, autoPublish: {}",
                dto.getCrawlImages(), dto.getAutoPublish());

        task = mongoTemplate.save(task);
        startTask(task, config);
        return task.getId();
    }

    /**
     * Wires up processor + pipeline, registers the crawl context, marks the task
     * RUNNING, starts the progress timer, and runs the spider on the pool.
     */
    private void startTask(CrawlerTask task, CrawlerConfig config) {
        AbstractESPNPageProcessor processor = createProcessor(task.getRuleId(), config);
        Pipeline pipeline = createPipeline(task.getRuleId(), config);

        Spider spider = spiderFactory.createSpider(
                getStartUrl(task.getRuleId()),
                processor,
                pipeline
        );

        // Register the context before starting so the progress timer can see it.
        activeSpiders.put(task.getId(), new CrawlerContext(spider, processor, config));

        task.setStatus("RUNNING");
        task.setStartTime(new Date());
        mongoTemplate.save(task);

        startProgressTimer(task.getId());

        // Run the spider ON the pool thread. The previous runAsync() call spawned
        // yet another thread and returned immediately, so the catch block below
        // could never observe a crawl failure and the ERROR status was unreachable.
        executorService.execute(() -> {
            try {
                if (spider.getStatus() != Spider.Status.Stopped) {
                    spider.run();
                }
            } catch (Exception e) {
                log.error("Crawler task failed: {}", task.getId(), e);
                updateTaskStatus(task.getId(), "ERROR", e.getMessage());
            }
        });
    }

    /**
     * Instantiates the page processor for the rule and applies the image flag.
     *
     * @throws IllegalArgumentException for an unknown rule id
     */
    private AbstractESPNPageProcessor createProcessor(String ruleId, CrawlerConfig config) {
        AbstractESPNPageProcessor processor = switch (ruleId) {
            case "basketball" -> new ESPNBasketBallPageProcessor();
            case "soccer" -> new ESPNSoccerPageProcessor();
            case "rugby" -> new ESPNRugbyProcessor();
            case "sports-betting" -> new ESPNBettingPageProcessor();
            case "nba" -> new ESPNBasketBallNBAProcessor();
            case "transfers" -> new ESPNSoccerTransfersProcessor();
            default -> throw new IllegalArgumentException("Unknown rule: " + ruleId);
        };
        processor.setCrawlImages(config.getCrawlImages());
        return processor;
    }

    /**
     * Looks up the Spring-managed pipeline bean for the rule and applies the
     * task's publish/image flags.
     *
     * NOTE(review): pipeline beans appear to be shared singletons, so setting
     * per-task flags on them can race between concurrent tasks of the same
     * rule — confirm bean scope.
     *
     * @throws IllegalArgumentException for an unknown rule id
     */
    private Pipeline createPipeline(String ruleId, CrawlerConfig config) {
        AbstractESPNPipeline pipeline = (AbstractESPNPipeline) switch (ruleId) {
            case "basketball" -> context.getBean(ESPNBasketballPipeline.class);
            case "soccer" -> context.getBean(ESPNSoccerPipeline.class);
            case "rugby" -> context.getBean(ESPNRugbyPipeline.class);
            case "sports-betting" -> context.getBean(ESPNBettingPipeline.class);
            case "nba" -> context.getBean(ESPNNBAPipeline.class);
            case "transfers" -> context.getBean(ESPNSoccerTransfersPipeline.class);
            default -> throw new IllegalArgumentException("Unknown rule: " + ruleId);
        };

        pipeline.setAutoPublish(config.getAutoPublish());
        pipeline.setCrawlImages(config.getCrawlImages());

        return pipeline;
    }

    /**
     * Pushes the processor's page count to Mongo/WebSocket once a second until
     * the spider stops or the task is removed from {@code activeSpiders}.
     */
    private void startProgressTimer(String taskId) {
        // Daemon timer: must not keep the JVM alive if shutdown happens mid-crawl.
        Timer timer = new Timer(true);
        timer.scheduleAtFixedRate(new TimerTask() {
            @Override
            public void run() {
                CrawlerContext ctx = activeSpiders.get(taskId);
                if (ctx == null) {
                    // Task was stopped/removed: cancel instead of spinning forever
                    // (the previous version leaked the timer in this case).
                    timer.cancel();
                    return;
                }
                if (ctx.getProcessor() != null) {
                    updateTaskProgress(taskId, ctx.getProcessor().getProcessedCount());

                    // Spider finished or was stopped — no further updates needed.
                    if (ctx.getSpider().getStatus() != Spider.Status.Running) {
                        timer.cancel();
                    }
                }
            }
        }, 0, 1000); // update once per second
    }

    /** Persists the current page count and broadcasts the refreshed task state. */
    private void updateTaskProgress(String taskId, int pageCount) {
        Query query = Query.query(Criteria.where("id").is(taskId));
        Update update = Update.update("pageCount", pageCount)
                .set("updateTime", new Date());
        CrawlerTask task = mongoTemplate.findAndModify(query, update,
                FindAndModifyOptions.options().returnNew(true),
                CrawlerTask.class);

        if (task != null) {
            webSocketHandler.broadcastTaskUpdate(taskId, toTaskVO(task));
        }
    }

    /** Builds the WebSocket/REST view object for a task (single source of truth). */
    private CrawlerTaskVO toTaskVO(CrawlerTask task) {
        return CrawlerTaskVO.builder()
                .taskId(task.getId())
                .name(task.getName())
                .status(task.getStatus())
                .pageCount(task.getPageCount())
                .startTime(task.getStartTime())
                .ruleDescription(task.getRuleDescription())
                .endTime(task.getEndTime())
                .errorMessage(task.getErrorMessage())
                .build();
    }

    /**
     * Stops a running crawl, marks it STOPPED, and removes it from the active map.
     * A no-op (with a warning) when the task is unknown or not running.
     */
    public void stopTask(String taskId) {
        CrawlerContext ctx = activeSpiders.get(taskId);
        if (ctx == null) {
            log.warn("No active spider found for task: {}", taskId);
            return;
        }

        Spider spider = ctx.getSpider();
        if (spider.getStatus() == Spider.Status.Running || spider.getStatus() == Spider.Status.Init) {
            log.info("Stopping spider for task: {}", taskId);
            spider.stop();

            updateTaskStatus(taskId, "STOPPED", null);

            // Removal also lets the progress timer cancel itself on its next tick.
            activeSpiders.remove(taskId);
        } else {
            log.warn("Spider is not running. Current status: {}", spider.getStatus());
        }
    }

    /**
     * Persists a status change (plus optional error message) and broadcasts it.
     * Terminal statuses (ERROR, STOPPED) also record the end time.
     */
    public void updateTaskStatus(String taskId, String status, String errorMessage) {
        Query query = Query.query(Criteria.where("id").is(taskId));
        Update update = Update.update("status", status)
                .set("updateTime", new Date());

        // Yoda comparison avoids an NPE on a null status; STOPPED previously
        // never received an endTime even though it is terminal.
        if ("ERROR".equals(status) || "STOPPED".equals(status)) {
            update.set("endTime", new Date());
        }

        if (errorMessage != null) {
            update.set("errorMessage", errorMessage);
        }

        CrawlerTask task = mongoTemplate.findAndModify(query, update,
                FindAndModifyOptions.options().returnNew(true),
                CrawlerTask.class);

        if (task != null) {
            webSocketHandler.broadcastTaskUpdate(taskId, toTaskVO(task));
        }
    }

    /**
     * Returns one page of tasks, newest first, with per-row action flags.
     */
    public Page<CrawlerTaskVO> getTasks(ActiveTasksDto activeTasksDto) {
        Pageable pageable = PageRequest.of(activeTasksDto.getPage(), activeTasksDto.getSize());

        Query query = new Query()
                .with(Sort.by(Sort.Direction.DESC, "createTime"))
                .with(pageable);

        List<CrawlerTask> tasks = mongoTemplate.find(query, CrawlerTask.class);

        // Count on a fresh, unpaginated query; the previous skip(-1)/limit(-1)
        // trick mutated the paged query in place and relied on driver behavior.
        long total = mongoTemplate.count(new Query(), CrawlerTask.class);

        List<CrawlerTaskVO> taskVos = tasks.stream()
                .map(task -> {
                    CrawlerTaskVO vo = toTaskVO(task);
                    vo.setCanDelete(true);
                    vo.setCanStop("RUNNING".equals(task.getStatus()));
                    return vo;
                })
                .collect(Collectors.toList());

        return new PageImpl<>(taskVos, pageable, total);
    }

    /** Stops the crawl if active, then removes the task document from Mongo. */
    public void deleteTask(String taskId) {
        stopTask(taskId);
        mongoTemplate.remove(Query.query(Criteria.where("id").is(taskId)), CrawlerTask.class);
    }

    /** Graceful pool shutdown on bean destruction; forces after a 60 s grace period. */
    @PreDestroy
    public void shutdownExecutor() {
        log.info("Shutting down ExecutorService...");
        executorService.shutdown();
        try {
            if (!executorService.awaitTermination(60, TimeUnit.SECONDS)) {
                executorService.shutdownNow();
            }
        } catch (InterruptedException e) {
            executorService.shutdownNow();
            Thread.currentThread().interrupt(); // preserve interrupt status
        }
    }

}