package com.chance.cc.crawler.development.bootstrap.dcdapp.dynamic;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.controller.DevCrawlerController;

import java.util.ArrayList;
import java.util.List;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;

/**
 * Development bootstrap for the "dcdapp" (dongchedi.com) dynamic-side crawler.
 *
 * <p>Each public/private launcher method below builds a {@link CrawlerRequestRecord}
 * describing a start page plus filter/tag configuration, then spins up a
 * {@link DevCrawlerController} that runs the crawl and writes results to
 * console and local log-file pipelines. Only one launcher is typically enabled
 * at a time from {@link #main(String[])}.
 */
public class DCDDynamicStart {
    /** Crawler domain key used for queues, HTTP config and tag values. */
    private static final String DOMAIN = "dcdapp";
    /** Site / site-biz tag value for the "dynamic" side of the crawler. */
    private static final String SIDE = "dynamic";
    /** Biz-tag key under which the comment-filter record is shipped as JSON. */
    private static final String COMMENT_FILTER_KEY = "comment_record_filter_info";
    /** Package containing the page-processing scripts picked up by the controller. */
    private static final String SCRIPT_PACKAGE = "com.chance.cc.crawler.development.scripts.dcdapp";

    public static void main(String[] args) {
        // Exactly one launcher is enabled at a time during development.
        // Alternatives: seriesCrawlerRecord(), articleUrl().
        chexi();
    }

    /**
     * Launches a crawl of the dongchedi home page ("dongtai" module, newest-first),
     * supported by a keyword feed fetched from an internal meta service.
     */
    public static void seriesCrawlerRecord(){
        String url = "https://www.dongchedi.com/";
        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(DOMAIN, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(DOMAIN)
                .httpUrl(url)
                .httpConfig(HttpConfig.me(DOMAIN))
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                // 24*611 hours ≈ 611-day window. NOTE(review): unusually large for a
                // "dynamic" crawl — confirm this is the intended look-back range.
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24*611,null))
                .releaseTime(System.currentTimeMillis())
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .build();
        crawlerRequestRecord.tagsCreator().bizTags().addSite(SIDE);
        crawlerRequestRecord.tagsCreator().bizTags().addSiteBiz(SIDE);
        crawlerRequestRecord.tagsCreator().bizTags().addDomain(DOMAIN);
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("domainName",DOMAIN);
        // Module (board) selection. Known values: "dongtai" (feed), "selected"
        // (featured), "wenda" (Q&A), "dakareyi" (reviews).
        List<String> moduleTag = new ArrayList<>();
        moduleTag.add("dongtai");
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("moduleTag",moduleTag);
        // Sort order within the module: "1" = newest published, "2" = latest reply.
        List<String> moduleSort = new ArrayList<>();
        moduleSort.add("1");
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("moduleSort",moduleSort);
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("accumulateUrl","accumulateUrl");

        // Build the filter applied to comment records: de-duplicate by key.
        CrawlerRecord filterCrawlerRecord =  new CrawlerRecord();
        filterCrawlerRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.key);
        filterCrawlerRecord.addFilterInfo(FilterUtils.memoryFilterKeyInfo(DOMAIN));
        // Serialized to JSON so the filter survives transport intact.
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(COMMENT_FILTER_KEY, JSON.toJSONString(filterCrawlerRecord));

        // Support record: pulls series keywords from the internal meta service;
        // downloaded internally rather than via the public site.
        CrawlerRequestRecord keywordRecord = CrawlerRequestRecord.builder()
                .startPageRequest("dcdapp_series_keyword",turnPageItem)
                .httpUrl("http://192.168.1.217:9599/v1/meta/dcdapp/keys?site=everyDayRealtime")
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();

        DevCrawlerController.builder()
                .triggerInfo(DOMAIN,DOMAIN,System.currentTimeMillis(),DOMAIN)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(DOMAIN))
                .crawlerThreadNum(20)
                .consoleResultPipeline("kafka")
                .consoleResultPipeline("redis")
                .fileResultPipeline("kafka","F:\\chance_log\\dcdapp\\kafka_1.log",false)
                .fileResultPipeline("redis","F:\\chance_log\\dcdapp\\redis_1.log",false)
                .requestRecord(crawlerRequestRecord)
                .supportRecord(keywordRecord)
                .build(SCRIPT_PACKAGE)
                .start();
    }

    /**
     * Launches a crawl of a single car-series page ("wenda" module, latest-reply
     * order), collecting articles, interactions and comments from the last 24h.
     */
    private static void chexi(){
        String url = "https://www.dongchedi.com/auto/series/289";
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .startPageRequest(DOMAIN, CrawlerEnum.CrawlerRequestType.turnPage)
                .httpUrl(url)
                .domain(DOMAIN)
                .httpConfig(HttpConfig.me(DOMAIN))
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24*1,null)) // 24h window
                .releaseTime(System.currentTimeMillis())
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                .build();
        record.tagsCreator().bizTags().addSite(SIDE);
        record.tagsCreator().bizTags().addSiteBiz(SIDE);
        record.tagsCreator().bizTags().addDomain(DOMAIN);
        record.tagsCreator().bizTags().addCustomKV("domainName",DOMAIN);
        // Module (board) selection. Known values: "dongtai" (feed), "selected"
        // (featured), "wenda" (Q&A), "dakareyi" (reviews).
        List<String> moduleTag = new ArrayList<>();
        moduleTag.add("wenda");
        record.tagsCreator().bizTags().addCustomKV("moduleTag",moduleTag);
        // Sort order within the module: "1" = newest published, "2" = latest reply.
        List<String> moduleSort = new ArrayList<>();
        moduleSort.add("2");
        record.tagsCreator().bizTags().addCustomKV("moduleSort",moduleSort);

        // Build the filter applied to comment records: key de-dup + 24h date range.
        // NOTE(review): setFilter is invoked twice in a row; if it is a plain setter
        // (as the name suggests) the 'key' filter set first is overwritten by
        // 'dateRange'. Confirm CrawlerRecord.setFilter accumulates filters, or use
        // a combined-filter API if one exists.
        CrawlerRecord filterCrawlerRecord =  new CrawlerRecord();
        filterCrawlerRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.key);
        filterCrawlerRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange);
        filterCrawlerRecord.addFilterInfo(FilterUtils.dateRangeFilterInfo(24 ,null));
        filterCrawlerRecord.addFilterInfo(FilterUtils.memoryFilterKeyInfo(DOMAIN));
        // Serialized to JSON so the filter survives transport intact.
        record.tagsCreator().bizTags().addCustomKV(COMMENT_FILTER_KEY, JSON.toJSONString(filterCrawlerRecord));

        DevCrawlerController.builder()
                .triggerInfo(DOMAIN,DOMAIN,System.currentTimeMillis(),DOMAIN)
                .crawlerThreadNum(1)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(DOMAIN))
                .consoleResultPipeline("redis")
                .consoleResultPipeline("kafka")
                .fileResultPipeline("kafka","F:\\chance_log\\dcdapp\\dynamic\\kafka-8-3_1.log",false)
                .fileResultPipeline("redis","F:\\chance_log\\dcdapp\\dynamic\\ZUIXIN.log",false)
                .requestRecord(record)
                .build(SCRIPT_PACKAGE)
                .start();
    }

    /**
     * Launches a crawl of a single UGC article page with page-washing enabled,
     * collecting the article plus its comments and interactions (7-day window).
     */
    public static void articleUrl(){
        String url = "https://www.dongchedi.com/ugc/article/1710239243357197";
        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(DOMAIN, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(DOMAIN)
                .httpUrl(url)
                .httpConfig(HttpConfig.me(DOMAIN))
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24*7,null)) // 7-day window
                .releaseTime(System.currentTimeMillis())
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                .build();
        crawlerRequestRecord.setNeedWashPage(true);
        crawlerRequestRecord.tagsCreator().bizTags().addDomain(DOMAIN);
        crawlerRequestRecord.tagsCreator().bizTags().addSite(SIDE);
        crawlerRequestRecord.tagsCreator().bizTags().addSiteBiz(SIDE);
        // NOTE(review): "dongchedi" differs from the "dcdapp" domainName used by the
        // other launchers — confirm whether this divergence is intentional.
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("domainName","dongchedi");

        // Build the filter applied to comment records: key de-dup + 24h date range.
        // NOTE(review): as in chexi(), setFilter may overwrite rather than add —
        // here only 'dateRange' would remain in effect. Confirm the API semantics.
        CrawlerRecord filterCrawlerRecord =  new CrawlerRecord();
        filterCrawlerRecord.addFilterInfo(FilterUtils.memoryFilterKeyInfo(DOMAIN));
        filterCrawlerRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange);
        filterCrawlerRecord.addFilterInfo(FilterUtils.dateRangeFilterInfo(24*1,null));
        // Serialized to JSON so the filter survives transport intact.
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(COMMENT_FILTER_KEY, JSON.toJSONString(filterCrawlerRecord));

        DevCrawlerController.builder()
                .triggerInfo(DOMAIN,DOMAIN,System.currentTimeMillis(),DOMAIN)
                .crawlerThreadNum(2)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(DOMAIN))
                .consoleResultPipeline("kafka")
                .fileResultPipeline("kafka","F:\\chance_log\\dcdapp\\dynamic\\common.log",false)
                .requestRecord(crawlerRequestRecord)
                .build(SCRIPT_PACKAGE)
                .start();
    }
}
