package com.chance.cc.crawler.development.bootstrap.baufortune;

import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.controller.DevCrawlerController;

import java.util.HashMap;
import java.util.Map;

/**
 * @Author Zhao.Hhuan
 * @Date Create in 2020/11/18 11:17
 * @Description
 *      紫荆财智 (Bauhinia Fortune) — development bootstrap that builds a seed
 *      request for www.baufortune.com and runs the crawler locally, printing
 *      results to the console.
 **/
public class Baufortune {

    /** Site identifier; reused as crawler domain, filter key, and trigger info. */
    private static final String DOMAIN = "baufortune";

    /** Entry URL; also doubles as the record key for request de-duplication. */
    private static final String START_URL = "http://www.baufortune.com";

    /**
     * Lower bound passed to the date-range filter (48 = two days).
     * NOTE(review): presumably a count of hours — confirm against
     * {@code FilterUtils.dateRangeFilterInfo}.
     */
    private static final int DATE_RANGE_LOWER = 24 * 2;

    public static void main(String[] args) {
        // Seed request: turn-page start, filtered by memory key OR date range.
        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(DOMAIN, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(DOMAIN)
                .httpUrl(START_URL)
                .recordKey(START_URL)
                .httpConfig(HttpConfig.me(DOMAIN))
                .filter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(DOMAIN))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(DATE_RANGE_LOWER, null))
                .releaseTime(System.currentTimeMillis())
                .build();

        // Tag the request as collecting "article" data; the enum value is used
        // as both map key and value (matches the framework's extras convention).
        Map<String, Object> crawlerTypeList = new HashMap<>();
        crawlerTypeList.put(CrawlerEnum.CrawlerDataType.article.enumVal(),
                CrawlerEnum.CrawlerDataType.article.enumVal());
        crawlerRequestRecord.getHttpRequest().setExtras(crawlerTypeList);

        // Run the dev crawler with a console pipeline (file pipeline kept below
        // as a documented alternative for local log capture).
        DevCrawlerController.builder()
                .triggerInfo(DOMAIN, DOMAIN, System.currentTimeMillis(), DOMAIN)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(DOMAIN))
                .consoleResultPipeline() // console output
//                .fileResultPipeline("D:\\chance\\log\\tets.log",true) // file output
                .requestRecord(crawlerRequestRecord)
                .build()
                .start();
    }
}
