package com.ningxun.pagentcrawler.service.task.impl;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.ningxun.pagentcrawler.entity.CrawlerConfig;
import com.ningxun.pagentcrawler.entity.CrawlerTask;
import com.ningxun.pagentcrawler.entity.task.CrawlerTaskEntity;
import com.ningxun.pagentcrawler.entity.policy.PolicyItem;
import com.ningxun.pagentcrawler.mapper.task.CrawlerTaskMapper;
import com.ningxun.pagentcrawler.service.config.ICrawlerConfigService;
import com.ningxun.pagentcrawler.service.core.ICrawlerService;
import com.ningxun.pagentcrawler.service.engine.UniversalCrawlerEngine;
import com.ningxun.pagentcrawler.service.task.ICrawlerTaskService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import com.ningxun.pagentcrawler.dto.CrawlerResult;

/**
 * Crawler task service implementation.
 *
 * <p>Runs crawler tasks asynchronously and tracks them in an in-memory
 * registry ({@link #runningTasks}). The registry is the source of truth for
 * "actually running"; any task persisted as {@code RUNNING} but absent from
 * the registry (e.g. after a JVM restart) is repaired to {@code FAILED} on
 * read.</p>
 *
 * <p>NOTE(review): the registry is per-JVM, so this service assumes a single
 * application instance — confirm before deploying multiple nodes.</p>
 */
@Service
public class CrawlerTaskServiceImpl implements ICrawlerTaskService {

    @Autowired
    private ICrawlerConfigService crawlerConfigService;

    @Autowired
    private UniversalCrawlerEngine universalCrawlerEngine;

    @Autowired
    private CrawlerTaskMapper crawlerTaskMapper;

    @Autowired
    private ICrawlerService crawlerService;

    // Task registry: task id -> "still allowed to run" flag. Setting the flag
    // to false requests cooperative cancellation; removing the entry means the
    // task is no longer running in this JVM.
    private final ConcurrentHashMap<String, AtomicBoolean> runningTasks = new ConcurrentHashMap<>();

    /**
     * Returns all tasks, repairing stale RUNNING statuses on the way out.
     *
     * @return all persisted tasks; an empty list on query failure (never null)
     */
    @Override
    public List<CrawlerTask> getAllTasks() {
        try {
            List<CrawlerTaskEntity> entities = crawlerTaskMapper.selectList(new QueryWrapper<>());
            List<CrawlerTask> tasks = new ArrayList<>(entities.size());
            for (CrawlerTaskEntity entity : entities) {
                CrawlerTask task = entity.toCrawlerTask();
                // DB says RUNNING but the registry has no entry: the task died
                // without cleanup (e.g. application restart) — mark it failed.
                if ("RUNNING".equals(task.getStatus()) && !runningTasks.containsKey(task.getId())) {
                    task.setStatus("FAILED");
                    task.setErrorMessage("任务异常终止");
                    updateTaskInDatabase(task);
                }
                tasks.add(task);
            }
            return tasks;
        } catch (Exception e) {
            System.err.println("获取任务列表失败: " + e.getMessage());
            return new ArrayList<>();
        }
    }

    /**
     * Returns tasks filtered by the optional criteria carried in {@code task}
     * (taskName like, configName like, status equals), newest first.
     *
     * @param task filter holder; may be null or have null/empty fields, in
     *             which case the corresponding condition is skipped
     * @return matching task entities; an empty list on query failure
     */
    @Override
    public List<CrawlerTaskEntity> getAllTasks(CrawlerTask task) {
        try {
            // BUGFIX: extract filter values up front. MyBatis-Plus evaluates the
            // value arguments of like()/eq() eagerly, so calling task.getXxx()
            // inline threw an NPE when task was null, even though the boolean
            // condition flag was false.
            String taskName = task != null ? task.getTaskName() : null;
            String configName = task != null ? task.getConfigName() : null;
            String status = task != null ? task.getStatus() : null;

            LambdaQueryWrapper<CrawlerTaskEntity> wrapper = new LambdaQueryWrapper<>();
            wrapper
                    .like(taskName != null && !taskName.isEmpty(),
                          CrawlerTaskEntity::getTaskName, taskName)
                    .like(configName != null && !configName.isEmpty(),
                          CrawlerTaskEntity::getConfigName, configName)
                    .eq(status != null && !status.isEmpty(),
                        CrawlerTaskEntity::getStatus, status)
                    .orderByDesc(CrawlerTaskEntity::getCreateTime);

            // Query (pagination is applied automatically when a page plugin is active)
            List<CrawlerTaskEntity> list = crawlerTaskMapper.selectList(wrapper);

            // Repair stale RUNNING statuses (see getAllTasks()).
            for (CrawlerTaskEntity entity : list) {
                if ("RUNNING".equals(entity.getStatus()) && !runningTasks.containsKey(entity.getId())) {
                    entity.setStatus("FAILED");
                    entity.setErrorMessage("任务异常终止");
                    crawlerTaskMapper.updateById(entity);
                }
            }

            return list;
        } catch (Exception e) {
            System.err.println("查询任务列表失败: " + e.getMessage());
            e.printStackTrace();
            return new ArrayList<>();
        }
    }

    /**
     * Looks up a single task by id, repairing a stale RUNNING status.
     *
     * @param taskId persisted task id
     * @return the task, or null when not found or on lookup failure
     */
    @Override
    public CrawlerTask getTaskById(String taskId) {
        try {
            CrawlerTaskEntity entity = crawlerTaskMapper.selectById(taskId);
            CrawlerTask task = entity != null ? entity.toCrawlerTask() : null;
            if (task != null && "RUNNING".equals(task.getStatus()) && !runningTasks.containsKey(taskId)) {
                // DB says RUNNING but nothing is running here — repair to FAILED.
                task.setStatus("FAILED");
                task.setErrorMessage("任务异常终止");
                updateTaskInDatabase(task);
            }
            return task;
        } catch (Exception e) {
            System.err.println("获取任务失败: " + e.getMessage());
            return null;
        }
    }

    /**
     * Creates, persists and asynchronously starts a crawler task for the given
     * configuration.
     *
     * <p>The returned task snapshot has status {@code RUNNING}; the async
     * worker updates the persisted record on completion, failure or
     * cancellation.</p>
     *
     * @param configId id of the crawler configuration to run
     * @param params   optional overrides (supports "maxPages", "requestInterval"); may be null
     * @return the newly created task (status RUNNING)
     */
    @Override
    public CrawlerTask startTask(String configId, Map<String, Object> params) {
        CrawlerTask task = new CrawlerTask();
        // NOTE(review): millisecond-timestamp ids can collide under concurrent
        // starts — consider a UUID if that becomes a real scenario.
        task.setId("task_" + System.currentTimeMillis());
        task.setTaskName("爬虫任务_" + System.currentTimeMillis());
        task.setConfigId(configId);
        task.setStatus("RUNNING");
        task.setStartTime(LocalDateTime.now());
        task.setCreateTime(LocalDateTime.now());
        task.setCreateBy("admin");

        // Resolve the configuration; may legitimately be null for an unknown id —
        // the worker fails the task with a clear message in that case.
        CrawlerConfig config = crawlerConfigService.getConfigById(configId);
        if (config != null) {
            task.setConfigName(config.getConfigName());
        }

        // Persist the initial RUNNING record.
        CrawlerTaskEntity entity = CrawlerTaskEntity.fromCrawlerTask(task);
        crawlerTaskMapper.insert(entity);

        // Register with the running-task registry before launching the worker.
        runningTasks.put(task.getId(), new AtomicBoolean(true));

        // Execute the crawl asynchronously; the finally block guarantees the
        // persisted record and the registry are cleaned up in all outcomes.
        CompletableFuture.runAsync(() -> {
            try {
                // Let the engine poll our cooperative-cancellation flag.
                universalCrawlerEngine.setStopCheckFunction(() -> shouldStop(task.getId()));
                executeCrawlerTask(task, config, params);
            } catch (Exception e) {
                task.setStatus("FAILED");
                task.setErrorMessage(e.getMessage());
                System.err.println("爬虫任务执行失败: " + e.getMessage());
            } finally {
                task.setEndTime(LocalDateTime.now());
                if (task.getStartTime() != null) {
                    task.setDuration(ChronoUnit.MILLIS.between(task.getStartTime(), task.getEndTime()));
                }
                // Flush final state, then release the registry entry.
                updateTaskInDatabase(task);
                runningTasks.remove(task.getId());
            }
        });

        return task;
    }

    /**
     * Requests cooperative cancellation of a running task and marks the
     * persisted record CANCELLED.
     *
     * @param taskId id of the task to stop
     */
    @Override
    public void stopTask(String taskId) {
        try {
            // Flip the registry flag so the worker stops at its next checkpoint.
            AtomicBoolean runningFlag = runningTasks.get(taskId);
            if (runningFlag != null && runningFlag.get()) {
                runningFlag.set(false);
                System.out.println("✅ 已设置任务停止标志: " + taskId);
            }

            // Reflect the cancellation in the database immediately.
            CrawlerTaskEntity entity = crawlerTaskMapper.selectById(taskId);
            if (entity != null && "RUNNING".equals(entity.getStatus())) {
                entity.setStatus("CANCELLED");
                entity.setEndTime(LocalDateTime.now());
                if (entity.getStartTime() != null) {
                    entity.setDuration(ChronoUnit.MILLIS.between(entity.getStartTime(), entity.getEndTime()));
                }
                crawlerTaskMapper.updateById(entity);
                System.out.println("✅ 已更新任务状态为已取消: " + taskId);
            }
        } catch (Exception e) {
            System.err.println("停止任务失败: " + e.getMessage());
        }
    }

    /**
     * Returns the task's current status, preferring the live registry over the
     * (possibly stale) database record.
     *
     * @param taskId task id
     * @return status string (e.g. RUNNING/SUCCESS/FAILED/CANCELLED), or
     *         "UNKNOWN" when the task is not found or lookup fails
     */
    @Override
    public String getTaskStatus(String taskId) {
        try {
            // Live registry wins: if the flag is present and true, it is RUNNING.
            AtomicBoolean runningFlag = runningTasks.get(taskId);
            if (runningFlag != null && runningFlag.get()) {
                return "RUNNING";
            }

            CrawlerTaskEntity entity = crawlerTaskMapper.selectById(taskId);
            if (entity != null) {
                // Repair stale RUNNING status (task died without cleanup).
                if ("RUNNING".equals(entity.getStatus()) && !runningTasks.containsKey(taskId)) {
                    entity.setStatus("FAILED");
                    entity.setErrorMessage("任务异常终止");
                    crawlerTaskMapper.updateById(entity);
                    return "FAILED";
                }
                return entity.getStatus();
            }
            return "UNKNOWN";
        } catch (Exception e) {
            System.err.println("获取任务状态失败: " + e.getMessage());
            return "UNKNOWN";
        }
    }

    /**
     * Deletes a task: stops it if still running, unregisters it, and removes
     * the persisted record.
     *
     * @param taskId id of the task to delete
     */
    @Override
    public void deleteTask(String taskId) {
        try {
            // Stop first so the async worker does not resurrect the record.
            stopTask(taskId);
            runningTasks.remove(taskId);
            crawlerTaskMapper.deleteById(taskId);
        } catch (Exception e) {
            System.err.println("删除任务失败: " + e.getMessage());
        }
    }

    /**
     * Cooperative-cancellation check: a task should stop when its registry
     * entry is missing or its flag has been cleared.
     */
    private boolean shouldStop(String taskId) {
        AtomicBoolean runningFlag = runningTasks.get(taskId);
        return runningFlag == null || !runningFlag.get();
    }

    /**
     * Runs the crawl for one task: applies parameter overrides, executes the
     * engine, saves the results, and records counts on {@code task}.
     *
     * @param task   mutable task record updated with results/status
     * @param config crawler configuration; must not be null
     * @param params optional overrides ("maxPages", "requestInterval"); may be null
     */
    private void executeCrawlerTask(CrawlerTask task, CrawlerConfig config, Map<String, Object> params) {
        try {
            System.out.println("开始执行爬虫任务: " + task.getTaskName());

            // BUGFIX: fail fast with a meaningful message instead of an NPE
            // (whose message is null) when the config id resolved to nothing.
            if (config == null) {
                throw new IllegalStateException("爬虫配置不存在: " + task.getConfigId());
            }

            // Apply per-run overrides on top of the stored configuration.
            if (params != null) {
                if (params.containsKey("maxPages")) {
                    config.setMaxPages((Integer) params.get("maxPages"));
                }
                if (params.containsKey("requestInterval")) {
                    config.setRequestInterval((Integer) params.get("requestInterval"));
                }
            }

            // Run the crawl.
            List<PolicyItem> policies = universalCrawlerEngine.crawlWithConfig(config);

            // Honor cancellation requested during the crawl.
            if (shouldStop(task.getId())) {
                System.out.println("任务已被取消: " + task.getTaskName());
                task.setStatus("CANCELLED");
                task.setErrorMessage("任务被用户取消");
                return;
            }

            // Persist results and feed them into the workflow (includes de-dup).
            CrawlerResult saveResult = crawlerService.savePoliciesToDatabase(policies);

            // Honor cancellation requested during the save.
            if (shouldStop(task.getId())) {
                System.out.println("任务已被取消: " + task.getTaskName());
                task.setStatus("CANCELLED");
                task.setErrorMessage("任务被用户取消");
                return;
            }

            // Record outcome counters and mark success.
            task.setCrawledCount(policies.size());
            task.setSavedCount(saveResult.getSuccessCount());
            task.setFailedCount(saveResult.getFailCount());
            task.setStatus("SUCCESS");

            System.out.println("爬虫任务执行完成: " + task.getTaskName() +
                             ", 爬取数量: " + task.getCrawledCount() +
                             ", 成功保存: " + task.getSavedCount() +
                             ", 失败数量: " + task.getFailedCount());

        } catch (Exception e) {
            task.setStatus("FAILED");
            task.setErrorMessage(e.getMessage());
            task.setFailedCount(task.getCrawledCount());
            System.err.println("爬虫任务执行失败: " + e.getMessage());
            throw e; // rethrow so the caller's catch/finally also observes the failure
        }
    }

    /**
     * Persists the current state of a task, swallowing (but logging) any
     * persistence failure so cleanup paths never throw.
     */
    private void updateTaskInDatabase(CrawlerTask task) {
        try {
            CrawlerTaskEntity entity = CrawlerTaskEntity.fromCrawlerTask(task);
            crawlerTaskMapper.updateById(entity);
        } catch (Exception e) {
            System.err.println("更新任务到数据库失败: " + e.getMessage());
        }
    }
}