package com.syl.ippool.graspimpl;

import com.syl.ippool.domain.IpBasePojo;
import com.syl.ippool.ifice.grasp.IGrasp;
import com.syl.ippool.ifice.pool.IGraspIpPool;
import com.syl.ippool.utils.IpUtils;
import lombok.extern.slf4j.Slf4j;

import java.util.ArrayList;
import java.util.List;

/**
 * Manager for all IP-grasping crawlers: registers the crawler
 * implementations and runs them in a periodic loop, checking the
 * harvested IPs and feeding live ones into the active IP pool.
 *
 * @author sunyalong
 * @date 2019-12-12 18:38:48
 */
@Slf4j
public class GraspManager {

    // Registry of all crawler implementations.
    // NOTE(review): public static mutable state kept for backward
    // compatibility with existing callers; diamond operator fixes the
    // raw-type warning from `new ArrayList()`.
    public static volatile List<IGrasp> graspList = new ArrayList<>();

    // The IP pool currently in use; volatile for cross-thread visibility.
    public static volatile IGraspIpPool graspIpPool = null;

    /**
     * @param iGraspIpPool the pool that validated IPs are pushed into
     */
    public GraspManager(IGraspIpPool iGraspIpPool) {
        graspIpPool = iGraspIpPool;
    }

    /**
     * Registers all crawlers with this manager.
     */
    public void register() {
        // Register the XiCi proxy crawler.
        graspList.add(new XiCiDailiIGrasp());
        // Register the 89/JiuBa proxy crawler.
        graspList.add(new JiuBaDailiGrasp());
    }

    /**
     * Runs all registered crawlers in an endless loop, sleeping one minute
     * between rounds. A failure of one crawler does not stop the others.
     * Exits cleanly when the running thread is interrupted.
     */
    public void run() {
        // Loop until interrupted instead of an unstoppable while(true).
        while (!Thread.currentThread().isInterrupted()) {
            for (IGrasp iGrasp : graspList) {
                try {
                    // Fetch every IP currently published by this website.
                    List<IpBasePojo> execute = iGrasp.execute();

                    // Parameterized logging instead of string concatenation.
                    log.info("{}-{}成功获取{}条ip",
                            iGrasp.getWebsiteName(), iGrasp.getUrl(), execute.size());

                    // Probe the harvested IPs; live ones go into the pool.
                    IpUtils.checkActive(execute, graspIpPool);

                } catch (Exception e) {
                    // BUG FIX: the original call had no {} placeholders, so the
                    // url and the "获取ip失败" message were silently dropped and
                    // the exception itself was never logged.
                    log.warn("{}-{}获取ip失败", iGrasp.getWebsiteName(), iGrasp.getUrl(), e);
                }
            }
            try {
                // Sleep one minute before crawling all websites again.
                Thread.sleep(60000);
            } catch (InterruptedException e) {
                // BUG FIX: restore the interrupt flag and let the loop exit
                // instead of printStackTrace() + swallowing the interrupt.
                Thread.currentThread().interrupt();
            }
        }
    }
}
