package com.inkFlow.crawl.service.impl;

import cn.dev33.satoken.stp.StpUtil;
import cn.hutool.core.lang.Dict;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.inkFlow.common.core.constant.CrawlConstants;
import com.inkFlow.common.core.constant.CrawlDBConstants;
import com.inkFlow.common.core.domain.R;
import com.inkFlow.common.core.domain.model.LoginUser;
import com.inkFlow.common.core.utils.MapstructUtils;
import com.inkFlow.common.core.utils.StringUtils;
import com.inkFlow.common.json.utils.JsonUtils;
import com.inkFlow.common.mybatis.core.page.PageQuery;
import com.inkFlow.common.mybatis.core.page.TableDataInfo;
import com.inkFlow.common.satoken.utils.LoginHelper;
import com.inkFlow.crawl.core.template.SimpleEntity;
import com.inkFlow.crawl.core.template.text.SimpleTextPageProcessor;
import com.inkFlow.crawl.core.template.text.SimpleTextPipeline;
import com.inkFlow.crawl.domain.CrawlSourceConfig;
import com.inkFlow.crawl.domain.CrawlSourceConfigExtTextRule;
import com.inkFlow.crawl.domain.bo.CrawlSourceConfigBo;
import com.inkFlow.crawl.domain.vo.CrawlSourceConfigVo;
import com.inkFlow.crawl.mapper.CrawlSourceConfigExtTextRuleMapper;
import com.inkFlow.crawl.mapper.CrawlSourceConfigMapper;
import com.inkFlow.crawl.service.ICrawlSourceConfigService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import us.codecraft.webmagic.Request;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.pipeline.ConsolePipeline;

import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;

/**
 * 爬虫管理Service业务层处理
 *
 * @author inkFlow
 * @date 2025-08-21
 */
@Slf4j
@RequiredArgsConstructor
@Service
public class CrawlSourceConfigServiceImpl implements ICrawlSourceConfigService {
    private final Map<Integer, Spider> spiderMap = new HashMap<>();

    private final CrawlSourceConfigMapper baseMapper;
    @Autowired
    CrawlSourceConfigExtTextRuleMapper crawlSourceConfigExtTextRuleMapper;

    /**
     * 查询爬虫管理
     *
     * @param crawlId 主键
     * @return 爬虫管理
     */
    @Override
    public CrawlSourceConfigVo queryById(Integer crawlId) {
        return baseMapper.selectVoById(crawlId);
    }

    /**
     * 分页查询爬虫管理列表
     *
     * @param bo        查询条件
     * @param pageQuery 分页参数
     * @return 爬虫管理分页列表
     */
    @Override
    public TableDataInfo<CrawlSourceConfigVo> queryPageList(CrawlSourceConfigBo bo, PageQuery pageQuery) {
        LambdaQueryWrapper<CrawlSourceConfig> lqw = buildQueryWrapper(bo);
        Page<CrawlSourceConfigVo> result = baseMapper.selectVoPage(pageQuery.build(), lqw);
        return TableDataInfo.build(result);
    }

    /**
     * 查询符合条件的爬虫管理列表
     *
     * @param bo 查询条件
     * @return 爬虫管理列表
     */
    @Override
    public List<CrawlSourceConfigVo> queryList(CrawlSourceConfigBo bo) {
        LambdaQueryWrapper<CrawlSourceConfig> lqw = buildQueryWrapper(bo);
        return baseMapper.selectVoList(lqw);
    }

    private LambdaQueryWrapper<CrawlSourceConfig> buildQueryWrapper(CrawlSourceConfigBo bo) {
        Map<String, Object> params = bo.getParams();
        LambdaQueryWrapper<CrawlSourceConfig> lqw = Wrappers.lambdaQuery();
        lqw.orderByAsc(CrawlSourceConfig::getCrawlId);
        lqw.like(StringUtils.isNotBlank(bo.getCrawlName()), CrawlSourceConfig::getCrawlName, bo.getCrawlName());
        lqw.eq(StringUtils.isNotBlank(bo.getCrawlUrl()), CrawlSourceConfig::getCrawlUrl, bo.getCrawlUrl());
        lqw.eq(bo.getCrawlType() != null, CrawlSourceConfig::getCrawlType, bo.getCrawlType());
        lqw.eq(StringUtils.isNotBlank(bo.getCharset()), CrawlSourceConfig::getCharset, bo.getCharset());
        lqw.eq(bo.getCrawlEnabled() != null, CrawlSourceConfig::getCrawlEnabled, bo.getCrawlEnabled());
        lqw.eq(StringUtils.isNotBlank(bo.getCrawlRemark()), CrawlSourceConfig::getCrawlRemark, bo.getCrawlRemark());
        return lqw;
    }

    /**
     * 新增爬虫管理
     *
     * @param bo 爬虫管理
     * @return 是否新增成功
     */
    @Override
    public Boolean insertByBo(CrawlSourceConfigBo bo) {
        CrawlSourceConfig add = MapstructUtils.convert(bo, CrawlSourceConfig.class);
        validEntityBeforeSave(add);
        boolean flag = baseMapper.insert(add) > 0;
        if (flag) {
            bo.setCrawlId(add.getCrawlId());
        }
        return flag;
    }

    /**
     * 修改爬虫管理
     *
     * @param bo 爬虫管理
     * @return 是否修改成功
     */
    @Override
    public Boolean updateByBo(CrawlSourceConfigBo bo) {
        CrawlSourceConfig update = MapstructUtils.convert(bo, CrawlSourceConfig.class);
        validEntityBeforeSave(update);
        return baseMapper.updateById(update) > 0;
    }

    /**
     * 保存前的数据校验
     */
    private void validEntityBeforeSave(CrawlSourceConfig entity) {
        //TODO 做一些数据校验,如唯一约束
    }

    /**
     * 校验并批量删除爬虫管理信息
     *
     * @param ids     待删除的主键集合
     * @param isValid 是否进行有效性校验
     * @return 是否删除成功
     */
    @Override
    public Boolean deleteWithValidByIds(Collection<Integer> ids, Boolean isValid) {
        if (isValid) {
            //TODO 做一些业务上的校验,判断是否需要校验
        }
        return baseMapper.deleteByIds(ids) > 0;
    }

    /**
     * 开启或关闭爬虫
     *
     * @return
     */
    @Override
    public R<Void> openOrCloseCrawl(Integer crawlId, boolean openFlag) {
        CrawlSourceConfig crawlConfig = baseMapper.selectById(crawlId);
        if (Objects.isNull(crawlConfig)) {
            return R.fail("找不到对应的爬虫");
        }
        Integer crawlType = crawlConfig.getCrawlType();
        switch (crawlType) {
            case CrawlDBConstants.CRAWL_SOURCE_CONFIG_CRAWL_TYPE_1:
                return openFlag ? this.openCrawlText(crawlConfig,crawlId) : this.closeCrawlText(crawlConfig,crawlId);
            case CrawlDBConstants.CRAWL_SOURCE_CONFIG_CRAWL_TYPE_2:
                break;
            case CrawlDBConstants.CRAWL_SOURCE_CONFIG_CRAWL_TYPE_3:
                break;
            default:
                return R.fail("爬虫类型异常");
        }
        return R.ok();
    }

    /**
     * 开启文本类型的爬虫
     * @return
     */
    private R<Void> openCrawlText(CrawlSourceConfig crawlConfig,Integer crawlId) {
        CrawlSourceConfigExtTextRule extTextRule = crawlSourceConfigExtTextRuleMapper.selectById(crawlId);
        if (Objects.isNull(extTextRule)) {
            return R.fail("爬虫规则异常");
        }
        Site site = Site.me();
        if (StringUtils.isNotEmpty(extTextRule.getUserAgent())) {
            site.setUserAgent(extTextRule.getUserAgent());
        }
        if (Objects.nonNull(extTextRule.getSleepTime())) {
            site.setSleepTime(extTextRule.getSleepTime());
        }
        if (StringUtils.isNotEmpty(crawlConfig.getCharset())) {
            site.setCharset(crawlConfig.getCharset());
        }
        Dict headersDict = JsonUtils.parseMap(extTextRule.getHeaders());
        if (Objects.nonNull(headersDict)) {
            for (Map.Entry<String, Object> entry : headersDict.entrySet()) {
                site.addHeader(entry.getKey(), (String) entry.getValue());
            }
        }
        String categoryPageUrlRule = extTextRule.getCategoryPageUrlRule();
        String[] categoryIdList = StringUtils.split(extTextRule.getCategoryPageIdList(), ",");
        LoginUser loginUser = LoginHelper.getLoginUser();
        SimpleTextPageProcessor processorTemplate = new SimpleTextPageProcessor(site, extTextRule, loginUser);
        Spider spider = Spider.create(processorTemplate);
        for (String categoryId : categoryIdList) {
            String categoryPageUrl = categoryPageUrlRule.replace(CrawlConstants.REPLACE_CATEGORY_ID,categoryId);
            Request request = new Request(categoryPageUrl);
            Map<String, Object> extraMap = new HashMap<>();
            // category_page -> 分类页面
            extraMap.put(CrawlConstants.PROCESSOR_LEVEL_KEY, CrawlConstants.PROCESSOR_LEVEL_VALUE_CATEGORY_PAGE);
            SimpleEntity simpleEntity = new SimpleEntity();
            simpleEntity.setCategoryPageIndex(1);
            simpleEntity.setCrawlId(crawlId);
            extraMap.put(CrawlConstants.SIMPLE_ENTITY_KEY, simpleEntity);
            request.setExtras(extraMap);
            spider.addRequest(request);
        }
        spider.addPipeline(new SimpleTextPipeline());
        spider.addPipeline(new ConsolePipeline());
        if (Objects.nonNull(extTextRule.getThreads())) {
            spider.thread(extTextRule.getThreads());
        }
        spider.start();
        spiderMap.put(crawlId, spider);
        return R.ok();
    }
    /**
     * 关闭文本类型的爬虫
     * @return
     */
    private R<Void> closeCrawlText(CrawlSourceConfig crawlConfig, Integer crawlId) {
        CrawlSourceConfigExtTextRule extTextRule = crawlSourceConfigExtTextRuleMapper.selectById(crawlId);
        if (Objects.isNull(extTextRule)) {
            return R.fail("爬虫规则异常");
        }
        Spider spider = spiderMap.get(crawlId);
        if (Objects.nonNull(spider)) {
            Spider.Status status = spider.getStatus();
            if (Spider.Status.Running.equals(status)) {
                spider.stopWhenComplete();
            }
        }
        return R.ok();
    }
}
