package com.liqw.crawler.crawler.impl;

import com.liqw.crawler.crawler.HouseCrawler;
import com.liqw.crawler.crawler.downloader.SeleniumDownloader;
import com.liqw.crawler.crawler.processor.AbstractPageProcessor;
import com.liqw.crawler.crawler.processor.AbstractPageProcessorManager;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.stereotype.Component;
import us.codecraft.webmagic.ResultItems;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.pipeline.ResultItemsCollectorPipeline;

import java.util.List;
import java.util.Map;

@RequiredArgsConstructor
@Slf4j
@Component
public class WebHouseCrawler implements HouseCrawler {

    /** Resolves a page processor implementation for a given crawl type key. */
    private final AbstractPageProcessorManager processorManager;

    /**
     * Crawls a single property page synchronously and returns the extracted fields.
     *
     * @param type processor type key used to look up the matching {@link AbstractPageProcessor}
     * @param url  page URL to crawl
     * @return all fields of the first collected result, or {@code null} when the type is
     *         unknown or the crawl produced no results (null — not an empty map — is the
     *         existing contract callers rely on)
     */
    @Override
    public Map<String, Object> crawlProperty(String type, String url) {
        AbstractPageProcessor processor = processorManager.getAbstractPageProcessor(type);
        if (processor == null) {
            // Log the offending type so unknown-type requests can be diagnosed from the log.
            log.info("无此类型: type={}", type);
            return null;
        }

        // Collect extracted items in memory; the pipeline retains everything the spider emits.
        ResultItemsCollectorPipeline pipeline = new ResultItemsCollectorPipeline();
        Spider.create(processor)
                .addUrl(url)
                .setDownloader(new SeleniumDownloader())
                .addPipeline(pipeline)
                .run(); // blocking: run() returns only after the crawl finishes

        List<ResultItems> collected = pipeline.getCollected();
        return CollectionUtils.isEmpty(collected) ? null : collected.get(0).getAll();
    }
}