package com.chance.cc.crawler.development.bootstrap.bitauto;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.controller.DevCrawlerController;
import org.apache.commons.lang3.StringUtils;

import java.util.HashMap;
import java.util.Map;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.filter;

/**
 * @Author Zhao.Hhuan
 * @Date Create in 2020/11/16 16:47
 * @Description
 *     Dev bootstrap for crawling Bitauto "hao" (车家号) articles from the all-news listings.
 *     Each method builds a seed {@code CrawlerRequestRecord}, wires a {@code DevCrawlerController}
 *     with console/file result pipelines, attaches comment de-duplication filter info, and starts
 *     the crawl. Intended for manual local runs only (toggle the call in {@link #main}).
 **/
public class BitautoHaoArticle {
    /** Crawler domain identifier shared by the request queue, trigger info and filters. */
    private static final String DOMAIN = "bitauto";

    public static void main(String[] args) {
//        haoArticle();
        haoArticleNews();
    }

    /**
     * Crawls the HTML news listing page ({@code news.yiche.com}) as a turn-page seed.
     * Result records are both printed to the console and appended to a local log file.
     * De-duplication: key-or-date-range filter with a 24-hour window on the seed request.
     */
    private static void haoArticle() {
        String url = "https://news.yiche.com/info/categoryId0_p0_l0_f0_g0_c0_b0_1.html";
        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(DOMAIN, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(DOMAIN)
                .httpUrl(url)
                .httpConfig(HttpConfig.me(DOMAIN))
                .filter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(DOMAIN))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 1, null))
                .releaseTime(System.currentTimeMillis())
                .build();
        crawlerRequestRecord.tagsCreator().bizTags().addDomain(DOMAIN);
        crawlerRequestRecord.tagsCreator().bizTags().addSite("hao_article");

        DevCrawlerController devCrawlerController = DevCrawlerController.builder()
                .triggerInfo(DOMAIN, DOMAIN, System.currentTimeMillis(), DOMAIN)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(DOMAIN))
                .consoleResultPipeline("kafka_result") // console output
                .fileResultPipeline("kafka_result", "F:\\chance_log\\bitauto\\bitauto_all_article.log", false) // file output
                .requestRecord(crawlerRequestRecord)
                .build("com.chance.cc.crawler.development.scripts.bitauto");

        attachCommentDedupFilter(crawlerRequestRecord, devCrawlerController);
        devCrawlerController.start();
    }

    /**
     * Crawls the JSON "latest article" API ({@code hao.yiche.com}) as a turn-page seed.
     * Sends the {@code x-platform: phone} header and seeds paging state via the
     * {@code pageIndex} extra on the HTTP request. Results go to the console only
     * (file pipeline left commented out for local debugging).
     */
    private static void haoArticleNews() {
        String url = "https://hao.yiche.com/site_web/hao/api/get_latest_article_list";
        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(DOMAIN, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(DOMAIN)
                .httpUrl(url)
                // recordKey disambiguates this seed from other requests to the same URL
                .recordKey(url + "1")
                .httpConfig(HttpConfig.me(DOMAIN))
                .filter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(DOMAIN))
                .httpHead("x-platform", "phone")
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 1, null))
                .releaseTime(System.currentTimeMillis())
                .build();
        // Seed the paging cursor consumed by the page script when building next-page requests.
        Map<String, Object> extras = new HashMap<>();
        extras.put("pageIndex", 1);
        crawlerRequestRecord.getHttpRequest().setExtras(extras);
        crawlerRequestRecord.tagsCreator().bizTags().addDomain(DOMAIN);
        crawlerRequestRecord.tagsCreator().bizTags().addSite("hao");

        DevCrawlerController devCrawlerController = DevCrawlerController.builder()
                .triggerInfo(DOMAIN, DOMAIN, System.currentTimeMillis(), DOMAIN)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(DOMAIN))
                .consoleResultPipeline("kafka_result") // console output
//                .fileResultPipeline("kafka_result","F:\\chance_log\\bitauto\\bitauto_all_article.log",false) // file output
                .requestRecord(crawlerRequestRecord)
                .build("com.chance.cc.crawler.development.scripts.bitauto");

        attachCommentDedupFilter(crawlerRequestRecord, devCrawlerController);
        devCrawlerController.start();
    }

    /**
     * Attaches comment de-duplication filter info (shared by both bootstrap methods):
     * a key-based filter record with a 30-day date-range window, serialized into the
     * {@code comment_record_filter_info} business tag, plus the comment data-type label
     * on the controller's schedule tags.
     *
     * @param crawlerRequestRecord seed request to tag with the serialized filter record
     * @param devCrawlerController controller whose schedule tags receive the comment label
     */
    private static void attachCommentDedupFilter(CrawlerRequestRecord crawlerRequestRecord,
                                                 DevCrawlerController devCrawlerController) {
        CrawlerRecord filterCrawlerRecord = new CrawlerRecord();
        filterCrawlerRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.key);
        // Filter key namespace: "<filter>-<domain>-comment" keeps comment keys separate from article keys.
        filterCrawlerRecord.addFilterInfo(FilterUtils.memoryFilterKeyInfo(StringUtils.joinWith("-", filter, DOMAIN, "comment")));
        filterCrawlerRecord.addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 30, null));
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("comment_record_filter_info", JSON.toJSONString(filterCrawlerRecord));
        devCrawlerController.getCrawlerJob().getScheduleTags().getCategoryTag().addLabelTag(CrawlerEnum.CrawlerDataType.comment.enumVal());
    }
}
