package com.bblocks.common.data.handle;

import com.bblocks.area.AreaVo;
import com.bblocks.common.data.support.AreaSpider;
import com.bblocks.common.data.support.AreaSpiderWithRedisCache;
import com.bblocks.util.R;
import com.google.common.collect.Lists;
import com.pig4cloud.pig.common.core.constant.CacheConstants;
import com.pig4cloud.pig.common.core.constant.CommonConstants;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cache.annotation.CacheConfig;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;
import us.codecraft.webmagic.ResultItems;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.Task;
import us.codecraft.webmagic.pipeline.Pipeline;

import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * <p>Project: bblocks-pro - AreaCrawlingHandle</p>
 * <p>描述：区域采集服务(目前好像海南省市区不标准会采集失败-其他比较ok)-国家统计局</p>
 *
 * @Author Zhao [125043150@qq.com]
 * @Date 2024/3/18 15:25
 * @Version 1.0
 * @since 8
 */
@Slf4j
@Component
public class AreaCrawlingHandle implements IAreaCrawlingHandle {

    @Autowired
    RedisTemplate redisTemplate;

    /**
     * Dataset year to crawl (National Bureau of Statistics area data).
     * Overridable via {@code bb.crawling.area.year}; defaults to 2022.
     */
    @Value("${bb.crawling.area.year:2022}")
    private int year = 2022;

    /**
     * Redis TTL in minutes for cached per-page crawl results.
     * 14400 minutes = 10 days (overridable via {@code bb.crawling.area.cacheMinutes}).
     */
    @Value("${bb.crawling.area.cacheMinutes:14400}")
    private int cacheMinutes = 14400;

    /**
     * Crawls area data starting from an explicit page URL.
     *
     * @param url   start page to crawl
     * @param level depth/level passed through to the spider
     * @return {@code R.ok} with the collected areas when every page yielded data,
     *         otherwise a FAIL result carrying the partial data and the failure count
     */
    @Override
    public R<List<AreaVo>> crawlingArea(String url, int level) {
        return doCrawl(url, level);
    }

    /**
     * Crawls the full area tree starting from the statistics bureau index page.
     *
     * @param level depth/level passed through to the spider
     * @return same contract as {@link #crawlingArea(String, int)}
     */
    @Override
    public R<List<AreaVo>> crawlingAll(int level) {
        // Same pipeline as crawlingArea — only the start URL differs.
        return doCrawl(AreaSpider.SPIDER_URL + "index.html", level);
    }

    /**
     * Shared crawl routine: runs a 2-thread WebMagic spider from {@code startUrl},
     * caches each successfully parsed page's areas in Redis, collects all areas
     * into a thread-safe list, and counts pages that produced no data.
     *
     * @param startUrl first URL fed to the spider
     * @param level    depth/level passed through to the spider
     * @return OK result when no page failed, FAIL result (with partial data and
     *         failure count in the message) otherwise
     */
    private R<List<AreaVo>> doCrawl(String startUrl, int level) {
        // CopyOnWriteArrayList / AtomicInteger: the pipeline runs on spider worker threads.
        List<AreaVo> areaVos = new CopyOnWriteArrayList<>();
        AtomicInteger failNum = new AtomicInteger(0);

        Spider.create(new AreaSpiderWithRedisCache(year, level, areaVos, redisTemplate))
                .addUrl(startUrl)
                .addPipeline(new Pipeline() {
                    @Override
                    public void process(ResultItems resultItems, Task task) {
                        String url = resultItems.getRequest().getUrl();
                        List<AreaVo> areas = resultItems.get("area");

                        // Null-guard: ResultItems.get returns null when the key was never
                        // set by the processor; treat that like an empty (failed) page
                        // instead of throwing NPE. Unified to warn (was info/error
                        // inconsistently across the two public methods).
                        if (areas == null || areas.isEmpty()) { // page yielded no data
                            log.warn("此页面:{} 未爬取数据,请稍后重试!", url);
                            failNum.incrementAndGet();
                        } else {
                            // Cache this page's result so re-crawls can skip it.
                            redisTemplate.opsForValue()
                                    .set(CacheConstants.CRAWLING + ":area:" + url, areas, cacheMinutes, TimeUnit.MINUTES);
                            areaVos.addAll(areas);

                            // Mark the parent entry whose childUrl is this page as crawled.
                            // findAny: presumably childUrl is unique per parent — TODO confirm.
                            areaVos.stream()
                                    .filter(areaVo -> url.equalsIgnoreCase(areaVo.getChildUrl()))
                                    .findAny()
                                    .ifPresent(p -> p.setChildState(1));
                        }
                    }
                })
                .thread(2)
                .run(); // blocks until the spider finishes

        return failNum.get() == 0
                ? R.ok(areaVos)
                : R.restResult(areaVos, CommonConstants.FAIL, "采集失败次数：" + failNum.get());
    }

}
