package com.chance.cc.crawler.development.bootstrap.tencent;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.controller.DevCrawlerController;
import org.apache.commons.lang3.StringUtils;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.filter;

/**
 * Development bootstrap for the Tencent News ("tenxun") crawler.
 *
 * <p>Configures a dynamic HTTP proxy, builds an article crawl request,
 * attaches comment de-duplication filter info, and starts a
 * {@link DevCrawlerController} against an in-memory request queue.
 *
 * @author lt
 * @version 1.0
 * @date 2021-03-24 11:22:19
 * @email okprog@sina.com
 */
public class TencentStart {
    private static final String domain = "tenxun";
    public static final String site = "news";
    public static final String siteBiz = "news-realtime";

    /** Biz-tag key carrying the sports API token. */
    public static final String SPORTS_TOKEN = "sports_token";
    /** Biz-tag key carrying the sports channel ext ids. */
    public static final String SPORTS_EXT = "sports_ext";

    // Abuyun dynamic HTTP proxy configuration.
    // SECURITY NOTE(review): credentials are hard-coded here; move them to
    // external configuration or a secrets store before sharing/deploying.
    private static final Proxy proxy = new Proxy();
    static {
        proxy.setHost("http-dyn.abuyun.com");
        proxy.setPort(9020);
        proxy.setUsername("HL89Q19E86E2987D");
        proxy.setPassword("71F33D94CE5F7BF2");
    }

    /**
     * Entry point: assembles the article crawl request, attaches comment
     * de-duplication filter info, and starts the dev crawler controller.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        // Enable the TLS/SSL protocol versions required by the target site.
        System.setProperty("https.protocols", "TLSv1,TLSv1.1,TLSv1.2,SSLv3");

        // Article crawl request. Switch to doSearchNewsCrawler() to run the
        // keyword-search crawl instead of the single-item crawl.
        CrawlerRequestRecord articleCrawler = itemCrawler();

        // Comment de-duplication: a key-based memory filter combined with a
        // 24-hour date-range filter, serialized into the request's biz tags.
        CrawlerRecord filterCrawlerRecord = new CrawlerRecord();
        filterCrawlerRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange);
        filterCrawlerRecord.addFilterInfo(FilterUtils.memoryFilterKeyInfo(StringUtils.joinWith("-",filter,domain,"comment")));
        filterCrawlerRecord.addFilterInfo(FilterUtils.dateRangeFilterInfo(24,null));
        articleCrawler.tagsCreator().bizTags().addCustomKV("comment_record_filter_info", JSON.toJSONString(filterCrawlerRecord));

        DevCrawlerController devCrawlerController = DevCrawlerController.builder()
                .triggerInfo(domain, domain + "_trigger", System.currentTimeMillis(), domain + "_job")
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(domain)) // in-memory queue
                .consoleResultPipeline() // print results to the console
                // Alternative sink: .fileResultPipeline("D:\\chance\\data\\tenxun\\tenxun_news_test.json", false)
                .requestRecord(articleCrawler)
                .supportRecord(test(articleCrawler))
                .build();

        // Enable comment collection for this job.
        devCrawlerController.getCrawlerJob().getScheduleTags().getCategoryTag().addLabelTag(CrawlerEnum.CrawlerDataType.comment.enumVal());
        devCrawlerController.start();
    }

    /**
     * Debug helper: dumps the article request to stdout.
     *
     * @param articleCrawler the request record to print
     * @return always {@code null} — no support record is registered
     */
    public static CrawlerRequestRecord test(CrawlerRequestRecord articleCrawler){
        System.out.println(articleCrawler);
        return null;
    }

    /**
     * Builds the keyword-search crawl request for the Tencent News homepage.
     *
     * <p>Starts from {@code https://new.qq.com/}, filters duplicates by key or
     * by a 48-hour date range, and carries the keyword list plus the sports
     * token/ext ids in the request's extras and biz tags.
     *
     * @return the configured search request record
     */
    public static CrawlerRequestRecord doSearchNewsCrawler(){

        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(domain)
                .recordKey("https://new.qq.com/")
                .httpUrl("https://new.qq.com/")
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(domain))
                .filter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(domain))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24*2,null))
                .proxy(proxy)
                .needParsed(true)
                .needWashed(false)
                .build();
        // Other available channels: "health", "auto".
        List<String> keywords = Arrays.asList("sports");
        Map<String,Object> extras = new HashMap<>();
        extras.put("keywords",keywords);
        requestRecord.getHttpRequest().setExtras(extras);
        requestRecord.tagsCreator().bizTags().addCustomKV(SPORTS_TOKEN,"8f6b50e1667f130c10f981309e1d8200");
        requestRecord.tagsCreator().bizTags().addCustomKV(SPORTS_EXT,"221,203,210,229,209,222,220,201");

        applyBizTags(requestRecord);
        return requestRecord;
    }

    /**
     * Builds a crawl request for a single Tencent News article page.
     *
     * <p>Targets one fixed article URL, filters duplicates by key or by a
     * 48-hour date range, and labels the result as both article and
     * interaction data.
     *
     * @return the configured single-item request record
     */
    public static CrawlerRequestRecord itemCrawler(){
        String url = "https://new.qq.com/omn/20210616/20210616A020AS00.html";
        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(domain)
                .httpUrl(url)
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(domain))
                .filter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(domain))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24*2,null))
                .proxy(proxy)
                .needParsed(true)
                .needWashed(false)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                .build();

        applyBizTags(requestRecord);
        return requestRecord;
    }

    /** Applies the common domain/site/siteBiz business tags to a request record. */
    private static void applyBizTags(CrawlerRequestRecord requestRecord) {
        requestRecord.tagsCreator().bizTags().addDomain(domain);
        requestRecord.tagsCreator().bizTags().addSite(site);
        requestRecord.tagsCreator().bizTags().addSiteBiz(siteBiz);
    }
}
