package com.chance.cc.crawler.development.bootstrap.healthrexun;

import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.controller.DevCrawlerController;
import com.chance.cc.crawler.development.scripts.healthrexun.HealthReXunSiteCrawlerScript;
import org.apache.commons.lang3.StringUtils;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;

/**
 * Development bootstrap for the Health ReXun site crawler.
 *
 * <p>Entry point that configures and starts a one-off crawl of the site's
 * "news" section via {@link DevCrawlerController} (in-memory queue, console
 * output pipeline). Intended for local/dev runs, not production scheduling.
 */
public class HealthReXunStart {

    // NOTE(review): a commented-out proxy configuration containing hard-coded
    // credentials was removed from this file. If a proxy is needed for dev
    // runs, load host/port/credentials from environment or config — never
    // commit secrets, even in commented-out code.

    public static void main(String[] args) {
        news();
    }

    /**
     * Builds the crawl requests for the "news" section and starts the crawler.
     *
     * <p>Two records are submitted:
     * <ul>
     *   <li>a start-page request against {@code HealthReXunSiteCrawlerScript.startUrl}
     *       with paging ({@code turnPage}), a date-range filter (last 70 days),
     *       and washing/parsing/download/pipeline stages disabled — so it only
     *       discovers links;</li>
     *   <li>a support record that fetches previously-seen keys from an internal
     *       metadata service to seed item-page crawling ({@code turnPageItem}).</li>
     * </ul>
     */
    private static void news() {
        // Seed request: list-page discovery only — no wash/parse/download,
        // results skip the pipeline. Filter keeps items from the last 70 days
        // (70 * 24 hours; upper bound open).
        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(HealthReXunSiteCrawlerScript.domain, turnPage)
                .httpUrl(HealthReXunSiteCrawlerScript.startUrl)
                .releaseTime(System.currentTimeMillis())
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(70 * 24, null))
                .needWashed(false)
                .needParsed(false)
                .build();
        requestRecord.setDownload(false);
        requestRecord.setSkipPipeline(true);
        requestRecord.tagsCreator().bizTags().addSite("news");

        // Support record: pulls already-known keys for the "news" site from the
        // internal metadata service so item pages can be crawled/deduplicated.
        // NOTE(review): the metadata service address is a hard-coded internal
        // IP — consider moving it to configuration.
        CrawlerRequestRecord initNewsCrawlerRecord = CrawlerRequestRecord.builder()
                .startPageRequest(HealthReXunSiteCrawlerScript.domain, turnPageItem)
                .httpUrl("http://192.168.1.217:9599/v1/meta/health-rexun/keys?site=news")
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();

        DevCrawlerController.builder()
                .triggerInfo(HealthReXunSiteCrawlerScript.domain, "cron", System.currentTimeMillis(), "realtime")
                // In-memory queue named "crawler-<domain>-queue".
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(StringUtils.joinWith("-", "crawler", HealthReXunSiteCrawlerScript.domain, "queue")))
                .consoleResultPipeline() // write results to the console
                .requestRecord(requestRecord)
                .supportRecord(initNewsCrawlerRecord)
                .build()
                .start();
    }

}
