package com.yn.module.spider.service.tools;

import cn.hutool.core.exceptions.ExceptionUtil;
import com.baomidou.mybatisplus.core.toolkit.IdWorker;
import com.yn.module.spider.config.strategy.SpiderDTO;
import com.yn.module.spider.config.strategy.StrategyFactory;
import com.yn.module.spider.init.data.DataInit;
import com.yn.module.spider.pojo.CommEx;
import com.yn.module.spider.pojo.DicSpider;
import com.yn.module.spider.service.CommExService;
import com.yn.module.spider.utils.HttpUtils;
import com.yn.module.spider.utils.base.BaseSpiderService;
import com.yn.module.spider.utils.base.DetailSpiderService;
import com.yn.module.spider.utils.base.MainSpiderService;
import com.yn.module.spider.utils.constant.Const;
import com.yn.module.spider.utils.constant.OptEnum;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ThreadLocalRandom;

/**
 * @description: 结合策略模式简化爬虫操作
 * @author: 叶甯
 * @create: 2022/7/28 14:58
 * @since: 1.0.0
 * @copyright (C), 2022, https://gitee.com/johnny .All rights reserved.
 */
@Component
@Slf4j
public class SpiderService {

    @Resource
    private CommExService commExService;

    /**
     * 数据爬取
     *
     * @param dto
     */
    public void dataSpider(SpiderDTO dto) {
        // 设置相应参数
        final SpiderDTO spiderDTO = StrategyFactory.getStrategy4Spider(dto);
        // 获取对应配置
        final DicSpider spider = DataInit.DIC_SPIDER.get(dto.type().value() + Const.DEFAULT_SPLIT + dto.svc().value());
        CompletableFuture.supplyAsync(() -> {
            Random random = new Random();

            while (true) {
                try {
                    Thread.sleep(1000 + random.nextInt(1000 * 60 * 3));
                    //  爬取数据
                    if (OptEnum.OpearionType_Main.equals(dto.type())) {
                        spiderMain(spiderDTO, spider);
                        // 校验数据是否爬取完成
                        MainSpiderService spiderService = (MainSpiderService) spiderDTO.currentService();
                        if (spiderService.queryCurrentMaxPage(spider.getDataVer()).intValue()==spider.getPages()) {
                            System.out.println("数据爬取完成。。。。。。");
                            break;
                        }
                    } else if (OptEnum.OpearionType_Detail.equals(dto.type())) {
                        spiderDetail(spiderDTO, spider);
                        // 校验数据是否爬取完成
                        DetailSpiderService spiderService = (DetailSpiderService) spiderDTO.currentService();
                        if (spiderService.queryMaxPid(spider.getDataVer()).intValue()==spiderService.queryMaxMainTableId(spider.getDataVer()).intValue()) {
                            System.out.println("数据爬取完成。。。。。。");
                            break;
                        }
                    }

                } catch (Exception e) {
                    CommEx commEx = new CommEx();
                    commEx.setEid(IdWorker.getId());
                    commEx.setEx(ExceptionUtil.stacktraceToString(e));
                    commEx.setTableName(spider.getTbName());
                    commExService.save(commEx);
                    e.printStackTrace();
                    try {
                        Thread.sleep(1000 * 60 * 5 + random.nextInt(1000));
                    } catch (InterruptedException ex) {
                        ex.printStackTrace();
                    }
                }

            }
            return 1;
        });
    }

    /**
     * 爬取主表操作
     *
     * @param dto
     */
    private void spiderMain(SpiderDTO dto, DicSpider spider) throws Exception {
        // 1. 查询当前db中代表最大值的数据
        MainSpiderService spiderService = (MainSpiderService) dto.currentService();
        int startPage = spiderService.queryCurrentMaxPage(spider.getDataVer());
        startPage = startPage == 0 ? 1 : startPage;
        int MAX_PAGE = spider.getPages();
        // 2. 删除指定数据
        Map<String, Object> delParams = new HashMap<>();
        delParams.put("page", startPage);
        delParams.put(BaseSpiderService.DATA_VERSION,spider.getDataVer());
        spiderService.deleteCurrentData(delParams);

        // 3. 查询数据并保存
        HttpUtils httpUtils = new HttpUtils();
        for (int i = startPage; i <= MAX_PAGE; i++) {
            // 填入http请求中的参数
            Map<String, Object> httpParams = new HashMap<>();
            Map<String, Object> selfParams = new HashMap<>();
            System.out.println("current---i -" + i);
            httpParams.put(BaseSpiderService.CURRENT_PAGE, i);
            spiderService.setParams(null, httpParams, selfParams);

            // 3. 请求URL获取数据并保存
            String body = httpUtils.postQuery(spider.getUrl(), httpParams);
            spiderService.saveData2Db(body, selfParams);
            Random random = new Random();
            Thread.sleep(random.nextInt(1000 * 60 * 2));
        }

    }

    /**
     * 爬取明细表操作
     *
     * @param dto
     */
    private void spiderDetail(SpiderDTO dto, DicSpider spider) throws Exception {
        int length = 1008;//每次爬取数据的最大量
        // 查询明细中最大的pid
        DetailSpiderService spiderService = (DetailSpiderService) dto.currentService();
        Long startPid = spiderService.queryMaxPid(spider.getDataVer());

        // 1. 删除指定数据
        Map<String, Object> delParams = new HashMap<>();
        delParams.put("pid", startPid);
        delParams.put(BaseSpiderService.DATA_VERSION,spider.getDataVer());
        spiderService.deleteCurrentData(delParams);

        // 2 查询父类数据
        List<?> dataList = spiderService.queryParentDataList(length, startPid,spider.getDataVer());
        int dataLength = dataList.size();
        HttpUtils httpUtils = new HttpUtils();
        for (int i = 0; i < dataLength; i++) {
            Object mainObj = dataList.get(i);
            // 填入http请求中的参数
            Map<String, Object> httpParams = new HashMap<>();
            httpParams.put(BaseSpiderService.CURRENT_PAGE, i);
            // 填入自定义参数
            Map<String, Object> selfParams = new HashMap<>();
            selfParams.put(BaseSpiderService.DATA_VERSION,spider.getDataVer());
            spiderService.setParams(mainObj, httpParams, selfParams);

            // 3. 请求URL获取数据并保存
            String body = httpUtils.postQuery(spider.getUrl(), httpParams);
            spiderService.saveData2Db(body, selfParams);
            Random random = new Random();
            Thread.sleep(random.nextInt(1000 * 60 * 2));
        }
    }
}
