package com.chaosj.webmagic.scrape;

import cn.hutool.core.bean.BeanUtil;
import com.alibaba.fastjson.JSONObject;
import com.chaosj.webmagic.IScrapeHandler;
import com.chaosj.webmagic.downer.HttpLoginClientDownloader;
import com.chaosj.webmagic.dto.BlogHostEnums;
import com.chaosj.webmagic.md.MdConverterPipeline;
import com.chaosj.webmagic.process.BasePageProcessor;
import org.apache.http.impl.client.CloseableHttpClient;
import org.springframework.beans.BeanUtils;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.downloader.Downloader;
import us.codecraft.webmagic.pipeline.ResultItemsCollectorPipeline;
import us.codecraft.webmagic.processor.PageProcessor;
import us.codecraft.webmagic.scheduler.RedisScheduler;

import java.lang.management.ManagementFactory;
import java.util.concurrent.ConcurrentHashMap;

/**
 * @className: BlogSiteHandler
 * @description: Dispatches a blog URL to a host-specific page processor and runs a WebMagic spider over it.
 * @author: caoyangjie
 * @date: 2022/1/4
 **/
public class BlogSiteHandler implements IScrapeHandler<String, String> {
    // One cached processor / downloader per blog host, so repeated scrapes of the
    // same host reuse the same instances. Final: assigned once, never replaced.
    private final ConcurrentHashMap<BlogHostEnums, BasePageProcessor> processors = new ConcurrentHashMap<>();
    private final ConcurrentHashMap<BlogHostEnums, Downloader> downloaders = new ConcurrentHashMap<>();
    // Extra scrape parameters forwarded to the page processor; set via setParams(...).
    private JSONObject params;

    /**
     * Scrapes the given blog URL: resolves which supported host it belongs to,
     * lazily builds (and caches) the matching {@link BasePageProcessor}, performs a
     * login through a cached {@link HttpLoginClientDownloader} when the host
     * requires it, then runs a blocking WebMagic {@link Spider} over the URL.
     *
     * @param request the blog post URL to scrape
     */
    @Override
    public void scrape(String request) {
        BlogHostEnums blog = BlogHostEnums.position(request);
        // Lazily instantiate the processor class configured for this host.
        BasePageProcessor processor =
                processors.computeIfAbsent(blog, blogType -> BeanUtils.instantiateClass(blogType.getProccess()));
        processor.setParams(this.params);
        Spider spider = Spider.create(processor)
//                .setScheduler(new RedisScheduler("127.0.0.1"))
                .addPipeline(new MdConverterPipeline())
                .addUrl(request);
        if (blog.loginFlag()) {
            // computeIfAbsent already returns the mapped value — no need for a
            // second downloaders.get(blog) lookup as before.
            HttpLoginClientDownloader loginDownloader = (HttpLoginClientDownloader)
                    downloaders.computeIfAbsent(blog, blogType -> new HttpLoginClientDownloader());
            spider.setDownloader(loginDownloader);
            CloseableHttpClient loginClient = loginDownloader.getHttpClient(processor.getSite());
            processor.doLogin(loginClient);
        }
        // One worker thread per available CPU; run() blocks until the crawl finishes.
        spider.thread(ManagementFactory.getOperatingSystemMXBean().getAvailableProcessors()).run();
    }

    /**
     * Sets the parameters handed to each page processor before a scrape.
     *
     * @param json scrape parameters, forwarded verbatim to the processor
     * @return this handler, for call chaining
     */
    public BlogSiteHandler setParams(JSONObject json) {
        this.params = json;
        return this;
    }

    /**
     * Identity pass-through: the result reported for a request is the request itself.
     *
     * @param requestDto the original request value
     * @return the same value, unchanged
     */
    @Override
    public String getResult(String requestDto) {
        return requestDto;
    }
}
