package com.chance.cc.crawler.development.bootstrap.ifeng.health;

import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.controller.DevCrawlerController;

/**
 * @author lt
 * @version 1.0
 * @date 2021-02-09 11:12:30
 * @email okprog@sina.com
 */
/**
 * Dev bootstrap that starts an iFeng news crawler (health + sports channels)
 * through {@link DevCrawlerController} with an in-memory queue and console output.
 *
 * @author lt
 * @version 1.0
 * @date 2021-02-09 11:12:30
 * @email okprog@sina.com
 */
public class IFengStart {
    /** Logical crawler domain key, reused for queue, trigger, job and tag names. */
    private static final String domain = "ifeng";

    /** Shared outbound proxy applied to every crawl request. */
    private static final Proxy proxy = new Proxy();

    static {
        // Proxy configuration (abuyun dynamic HTTP proxy).
        // SECURITY(review): credentials were hard-coded in source control; they can
        // now be overridden via -Dproxy.* system properties. The literals below are
        // kept only as backward-compatible defaults — rotate them and move them to
        // secure configuration.
        proxy.setHost(System.getProperty("proxy.host", "http-dyn.abuyun.com"));
        proxy.setPort(Integer.getInteger("proxy.port", 9020));
        proxy.setUsername(System.getProperty("proxy.username", "HL89Q19E86E2987D"));
        proxy.setPassword(System.getProperty("proxy.password", "71F33D94CE5F7BF2"));
    }

    /**
     * Entry point: builds the health and sports crawl jobs, then starts the dev
     * crawler controller.
     */
    public static void main(String[] args) {
        // Allow legacy TLS handshakes for https access.
        // NOTE(review): SSLv3 is insecure and disabled by default in modern JDKs;
        // drop it from this list once confirmed unused by target sites.
        System.setProperty("https.protocols", "TLSv1,TLSv1.1,TLSv1.2,SSLv3");

        // Health-channel article crawl job.
        CrawlerRequestRecord articleCrawler = doSearchNewsCrawler();
        // Sports-channel article crawl job.
        CrawlerRequestRecord sportsCrawler = doSportsNewsCrawler();

        DevCrawlerController.builder()
                .triggerInfo(domain, domain + "_trigger", System.currentTimeMillis(), domain + "_job")
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(domain)) // in-memory queue
                .consoleResultPipeline() // print results to console
//                .fileResultPipeline("D:\\chance\\data\\ifeng\\ifeng_article_test.json",false)
                .requestRecord(articleCrawler)
                // BUGFIX(review): sportsCrawler was built but never registered
                // with the controller; register it as a second job.
                .requestRecord(sportsCrawler)
                .crawlerThreadNum(5)
                .build()
                .start();
    }

    /**
     * Crawl job for the iFeng health channel.
     *
     * @return a configured turn-page start request for {@code http://health.ifeng.com/}
     */
    public static CrawlerRequestRecord doSearchNewsCrawler() {
        return newChannelCrawler("health", "http://health.ifeng.com/");
    }

    /**
     * Crawl job for the iFeng sports channel.
     *
     * @return a configured turn-page start request for {@code http://sports.ifeng.com/}
     */
    public static CrawlerRequestRecord doSportsNewsCrawler() {
        return newChannelCrawler("sports", "http://sports.ifeng.com/");
    }

    /**
     * Builds a turn-page start request for one iFeng channel. Extracted because the
     * two public factory methods were copy-paste duplicates differing only in the
     * site tag and channel URL.
     *
     * @param site channel tag added to the record's business tags (e.g. "health")
     * @param url  channel index URL, also used as the record key
     * @return a fully configured {@link CrawlerRequestRecord}
     */
    private static CrawlerRequestRecord newChannelCrawler(String site, String url) {
        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(domain)
                .recordKey(url)
                .httpUrl(url)
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(domain))
                // De-duplicate by key OR accept records within the last 7 days.
                .filter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(domain))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 7, null)) // hours: 7 days
                .proxy(proxy)
                .needParsed(true)   // run the parse stage
                .needWashed(false)  // skip the wash/clean stage
                .build();

        // Business tags used downstream to label/route results.
        requestRecord.tagsCreator().bizTags().addDomain(domain);
        requestRecord.tagsCreator().bizTags().addSite(site);
        return requestRecord;
    }

}
