package com.fansl.allround.upms.helper.region;

import cn.hutool.core.util.StrUtil;
import com.fansl.allround.common.core.constant.enums.RegionLevelEnum;
import com.fansl.allround.upms.api.entity.SysRegion;
import com.fansl.allround.upms.helper.RegionHelper;
import com.fansl.allround.upms.service.SysRegionService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.redis.core.RedisTemplate;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.processor.PageProcessor;
import us.codecraft.webmagic.selector.Html;
import us.codecraft.webmagic.selector.Selectable;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;

import static com.fansl.allround.upms.helper.RegionHelper.HELP_URL;

/**
 * @author fansl
 * @Description: WebMagic page processor that extracts city-level region rows from a province
 * page, stores them as {@code SysRegion} results, and optionally triggers the county-level crawl
 * @date 2019/8/23 16:32
 */
@Slf4j
@Slf4j
public class CityProcessor implements PageProcessor {
    /**
     * Part 1: site-level crawl configuration — charset, timeout, retry counts and
     * the sleep interval between requests.
     */
    private final Site site = Site.me()
            .setCharset("gbk").setTimeOut(100 * 1000)
            .setRetryTimes(3).setSleepTime(3000)
            .setCycleRetryTimes(3);

    // Service used by the recursive spider to persist regions.
    private final SysRegionService sysRegionService;
    // Kept raw to preserve the existing constructor signature for callers.
    private final RedisTemplate redisTemplate;
    // When true, each processed city page also schedules a crawl of its counties.
    private final boolean recursionFlag;

    public CityProcessor(SysRegionService sysRegionService,
                         RedisTemplate redisTemplate,
                         boolean recursionFlag) {
        this.sysRegionService = sysRegionService;
        this.redisTemplate = redisTemplate;
        this.recursionFlag = recursionFlag;
    }

    /**
     * Core extraction callback of the crawler: parses one province page, collects every
     * {@code tr.citytr} row into a {@link SysRegion} (city level), publishes the results
     * and the processed URL as page fields, and — when {@code recursionFlag} is set —
     * asynchronously launches the county-level spider for the discovered links.
     *
     * @param page the fetched page whose URL encodes the parent (province) code
     */
    @Override
    public void process(Page page) {
        // Derive the parent code from the URL's last path segment, e.g. ".../13.html"
        // -> "13" right-padded with '0' to 12 characters.
        String parentCode = StrUtil.fillAfter(
                StrUtil.subAfter(page.getUrl().toString(), "/", true).replace(".html", ""),
                '0', 12);
        log.info("处理parentCode为：{}的信息", parentCode);
        // Part 2: extract the city rows from the page.
        Html html = page.getHtml();
        List<Selectable> cityHtmlList = html.xpath("//tr[@class='citytr']").nodes();

        // Part 3: collect city entities and the follow-up URLs to crawl.
        List<SysRegion> cityList = new ArrayList<>(cityHtmlList.size());
        List<String> cityUrlList = new ArrayList<>();
        for (Selectable selectable : cityHtmlList) {
            // Rows with a link nest code/name inside <td><a>; leaf rows put them
            // directly in <td>. Extract the cell texts once instead of per-field.
            String linkUrl = selectable.links().get();
            List<String> cells = linkUrl == null
                    ? selectable.$("td", "text").all()
                    : selectable.$("td a", "text").all();
            String code = cells.get(0);
            String name = cells.get(1);
            log.info("市行政编码：{}", code);
            log.info("市名称：{}", name);
            log.info("市下的链接：{}", linkUrl);
            SysRegion city = new SysRegion();
            city.setParentCode(parentCode)
                    .setName(name)
                    .setCode(code)
                    .setLevel(RegionLevelEnum.CITY.getLevel())
                    .setNextLevelUrl(linkUrl);
            RegionHelper.setRegionAlias(city);
            cityList.add(city);
            if (StrUtil.isNotBlank(linkUrl)) {
                cityUrlList.add(linkUrl);
            }
        }
        page.putField(RegionLevelEnum.CITY.name(), cityList);
        // Record the processed URL (consumed downstream, stored into redis).
        page.putField(HELP_URL, page.getUrl());
        // Check emptiness before submitting so we never spawn a no-op async task.
        if (recursionFlag && !cityUrlList.isEmpty()) {
            // NOTE(review): runAsync uses ForkJoinPool.commonPool(); the spider does
            // blocking network I/O — consider supplying a dedicated executor.
            CompletableFuture.runAsync(() ->
                    RegionHelper.executeSpider(RegionLevelEnum.COUNTY,
                            cityUrlList, recursionFlag, sysRegionService, redisTemplate));
        }
    }

    @Override
    public Site getSite() {
        return site;
    }
}
