package com.ruoyi.spider.spiderConfig.processor;

import com.ruoyi.spider.domain.ProcessorConfig;
import com.ruoyi.spider.domain.SpiderField;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.pipeline.ConsolePipeline;
import us.codecraft.webmagic.pipeline.Pipeline;
import us.codecraft.webmagic.proxy.Proxy;
import us.codecraft.webmagic.proxy.SimpleProxyProvider;
import us.codecraft.webmagic.scheduler.BloomFilterDuplicateRemover;
import us.codecraft.webmagic.scheduler.QueueScheduler;

import java.util.LinkedHashMap;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

/**
 * @author xgt(小光头)
 * @version 1.0
 * @date 2021-1-16 11:04
 */
public class DefaultProcessor extends AbstractProcessor {

    /**
     * Optional extra pipeline supplied by the caller; registered on the spider
     * when non-null so the caller receives page results as well.
     * (Kept public for backward compatibility with existing callers.)
     */
    public Pipeline pipeline;

    /**
     * Creates a processor driven only by the given crawl configuration.
     *
     * @param processorConfig crawl configuration (entry URLs, extraction rules, ...)
     */
    public DefaultProcessor(ProcessorConfig processorConfig) {
        super(processorConfig);
    }

    /**
     * Creates a processor with an explicit {@link Site} and an additional result pipeline.
     *
     * @param site            WebMagic site settings passed to the parent processor
     * @param processorConfig crawl configuration
     * @param pipeline        extra pipeline to receive page results; may be {@code null}
     */
    public DefaultProcessor(Site site, ProcessorConfig processorConfig, Pipeline pipeline) {
        super(site, processorConfig);
        this.pipeline = pipeline;
    }

    /**
     * 运行爬虫并返回结果 — runs the spider synchronously and returns the collected rows.
     *
     * @return one {@code LinkedHashMap} (field name → value) per extracted record;
     *         backed by a {@link CopyOnWriteArrayList} because pipeline callbacks
     *         may run on multiple spider threads
     */
    @Override
    public CopyOnWriteArrayList<LinkedHashMap<String, String>> execute() {

        CopyOnWriteArrayList<LinkedHashMap<String, String>> datas = new CopyOnWriteArrayList<>();

        Spider spider = Spider.create(this);
        spider.addUrl(processorConfig.getEntryUrls());
        // Collect each page's ResultItems into `datas` so they can be returned to the
        // caller. processData is inherited from AbstractProcessor — presumably it maps
        // ResultItems into a field-name → value map; confirm against the parent class.
        spider.addPipeline((resultItems, task) -> this.processData(resultItems, datas, spider));
        // Fix: the pipeline injected via the constructor was stored but never registered
        // (the addPipeline(pipeline) call was commented out), so caller-supplied
        // pipelines silently never ran. Register it when present.
        if (pipeline != null) {
            spider.addPipeline(pipeline);
        }

        // Blocks until the crawl completes, then hand back the accumulated rows.
        spider.setScheduler(new QueueScheduler()).run();

        return datas;
    }
}
