package com.chance.cc.crawler.development.bootstrap.enorth;

import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.controller.DevCrawlerController;

/**
 * @Author Zhao.Hhuan
 * @Date Create in 2020/11/30 13:41
 * @Description
 *      Enorth (北方网) — development bootstrap for crawling the Enorth news portal.
 **/
public class Enorth {

    /**
     * Development entry point: wires up a {@link DevCrawlerController} for the
     * "enorth" domain and starts a crawl from the site's front page.
     *
     * <p>Results are written to the console; the commented-out lines are
     * deliberate developer toggles (file output, alternate entrance URL for the
     * economy channel) left in place for local experimentation.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        String domain = "enorth";
        String enorthEntranceUrl = "http://enorth.com.cn/";
        // Alternate entrance: economy-channel listing page (toggle below).
        String modelEntranceUrl = "http://economy.enorth.com.cn/tjjj/jjzx/";

        DevCrawlerController.builder()
                .triggerInfo(domain, domain, System.currentTimeMillis(), domain)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(domain))
                .consoleResultPipeline() // console output
//                .fileResultPipeline("D:\\chance\\log\\tets.log",true) // file output
                .requestRecord(createCrawlerRequestRecord(domain, enorthEntranceUrl))
//                .requestRecord(createCrawlerRequestRecord(domain,modelEntranceUrl))
                .build()
                .start();
    }

    /**
     * Builds a start-page {@link CrawlerRequestRecord} for the given domain and
     * URL, configured as a turn-page request with a combined key/date-range
     * filter (memory key filter plus a 24-unit date-range filter — presumably
     * 24 hours; TODO confirm the unit against FilterUtils.dateRangeFilterInfo).
     *
     * @param domain crawler domain identifier (also used as the HttpConfig key)
     * @param url    entrance URL; also used as the record key
     * @return the assembled request record (proxy currently NOT applied — see note below)
     */
    public static CrawlerRequestRecord createCrawlerRequestRecord(String domain, String url) {
        // NOTE(review): hard-coded proxy credentials in source control (CWE-798).
        // These should be rotated and moved to configuration/environment before
        // this leaves development. The proxy is also built unconditionally even
        // though the .proxy(proxy) line below is commented out, so this object
        // is currently unused unless that toggle is re-enabled.
        Proxy proxy = new Proxy();
        proxy.setHost("http-dyn.abuyun.com");
        proxy.setPort(9020);
        proxy.setUsername("HL89Q19E86E2987D");
        proxy.setPassword("71F33D94CE5F7BF2");

        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(domain)
                .httpUrl(url)
                .recordKey(url)
                .httpConfig(HttpConfig.me(domain))
                .filter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(domain))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24, null)) // was 24*1; same value
                .releaseTime(System.currentTimeMillis())
//                .proxy(proxy) // developer toggle: route through the dynamic proxy above
                .build();

        return crawlerRequestRecord;
    }
}
