package com.chance.cc.crawler.development.bootstrap.sina;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.controller.DevCrawlerController;
import org.apache.commons.lang3.StringUtils;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRecordFilter.key;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRecordFilter.keyOrDateRange;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Tag_Site_Info;

/**
 * @Author Zhao.Hhuan
 * @Date Create in 2021/6/25 10:31
 * @Description
 *      Sina Finance (新浪财经) keyword-search crawler — developer bootstrap entry points.
 **/
public class SinaCjSearchKw {
    // Crawler identity: used for queue names, HttpConfig lookup, biz tags, and trigger info.
    private static final String domain = "sina";
    // Site identifier; also part of the in-memory comment-filter queue key ("filter-sina-cj_searchKw-queue").
    private static final String site = "cj_searchKw";

    public static void main(String[] args) {
        searchRequestRecord();
        // Alternate local-debug entry points; enable one at a time.
//        searchRequestRecordTest();
//        itemUrl();
    }


    /**
     * Production-style dev run: builds the keyword-search seed request and starts the
     * dev crawler controller with console + file pipelines (both labeled "kafka").
     *
     * The seed record points at the search landing page but is marked
     * setDownload(false) / setSkipPipeline(true); the actual search API URL template is
     * attached as the "searchKwSourceUrl" extra — presumably the site script fills in the
     * keyword (%s) and issues that request instead. TODO confirm against the sina site script.
     */
    private static void searchRequestRecord(){
        String url = "https://search.sina.com.cn/";
        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, turnPage)
                .domain(domain)
                .httpUrl(url)
                .httpConfig(HttpConfig.me(domain))
                // NOTE(review): filter mode is `key`, yet a dateRange filter info is added below,
                // and the test variant uses keyOrDateRange — confirm which mode is intended here.
                .filter(key)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 7,null))  // 7-day window
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(domain))
                .httpHead("referer","https://t.cj.sina.com.cn/article/search")
                .releaseTime(System.currentTimeMillis())
                // Expected result types produced downstream: article + interaction + comment.
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                .needParsed(false)
                .needWashed(false)
                .build();
        // Seed itself is neither downloaded nor piped — it only carries config/tags forward.
        crawlerRequestRecord.setDownload(false);
        crawlerRequestRecord.setSkipPipeline(true);
        crawlerRequestRecord.tagsCreator().bizTags().addDomain(domain);
        crawlerRequestRecord.tagsCreator().bizTags().addSite(site);
        // Search API URL template; %s is the keyword slot, remaining params are pre-encoded.
        crawlerRequestRecord.getHttpRequest().addExtra("searchKwSourceUrl","https://cre.dp.sina.cn/webes/simba/s2?q=%s&size=10&fields=title%2Clabels%2Ctags%2Cstocks%2Cstitle&qtp=match&sort=ctime+desc&idx=simba_caitou&tp=1%2C2%2C3%2C8%2C9%2C10%2C12%2C13%2C14%2C15%2C16%2C17%2C19%2C20&where=(copyright%3D%3D%3F+or+copyright%3C3)+and+published%3D%3D1&check=2&page=1");

        // Separate filter record for comment requests: key-dedup via an in-memory queue
        // plus a 24h date window; serialized into a category tag for downstream use.
        CrawlerRecord commentFilter = new CrawlerRequestRecord();
        commentFilter.setFilter(key);
        commentFilter.addFilterInfo(FilterUtils.memoryFilterKeyInfo(StringUtils.joinWith("-","filter",domain,site,"queue")));
        commentFilter.addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 1,null));
        crawlerRequestRecord.tagsCreator().resultTags().getCategoryTag().addKVTag("comment_filter_record", JSON.toJSONString(commentFilter));

        // Support record: fetches the keyword list from an internal meta service
        // (192.168.1.217 — dev-environment address) and feeds it as a supporting source.
        CrawlerRequestRecord keywordRecord = CrawlerRequestRecord.builder()
                .startPageRequest("sina_searchKw_keyword",turnPageItem)
                .httpUrl("http://192.168.1.217:9599/v1/meta/"+domain+"/keys?site=searchKw")
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();

        DevCrawlerController.builder()
                .triggerInfo(domain,domain,System.currentTimeMillis(),domain)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(domain))
                .consoleResultPipeline("kafka")// console output
                .fileResultPipeline("kafka","D:\\chance\\log\\tets.txt",false) // file output
                .requestRecord(crawlerRequestRecord)
                .supportRecord(keywordRecord)
                .build()
                .start();
    }

    /**
     * Debug variant: hits the search API directly with a fixed, pre-encoded query
     * (q=%E9%95%81%E4%BF%A1) instead of going through the keyword support record.
     * Uses the dateRange filter mode and console-only output.
     */
    private static void searchRequestRecordTest(){
        String url = "https://cre.dp.sina.cn/webes/simba/s2?q=%E9%95%81%E4%BF%A1&page=1&size=10&fields=title%2Clabels%2Ctags%2Cstocks%2Cstitle&qtp=match&sort=ctime+desc&idx=simba_caitou&tp=1%2C2%2C3%2C8%2C9%2C10%2C12%2C13%2C14%2C15%2C16%2C17%2C19%2C20&where=(copyright%3D%3D%3F%2Bor%2Bcopyright%3C3)%2Band%2Bpublished%3D%3D1&check=2";
        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, turnPage)
                .domain(domain)
                .httpUrl(url)
                .httpConfig(HttpConfig.me(domain))
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 7,null))  // 7-day window
                .httpHead("referer","https://t.cj.sina.com.cn/article/search")
                .releaseTime(System.currentTimeMillis())
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                .build();
        crawlerRequestRecord.tagsCreator().bizTags().addDomain(domain);
        crawlerRequestRecord.tagsCreator().bizTags().addSite(site);
        // Plain-text keyword extra; NOTE(review): does not match the encoded q= in the URL above — confirm which one the parser reads.
        crawlerRequestRecord.getHttpRequest().addExtra("keyword","镁信");

        // Comment filter record: key-or-dateRange mode here (vs. `key` in searchRequestRecord).
        CrawlerRecord commentFilter = new CrawlerRequestRecord();
        commentFilter.setFilter(keyOrDateRange);
        commentFilter.addFilterInfo(FilterUtils.memoryFilterKeyInfo(StringUtils.joinWith("-","filter",domain,site,"queue")));
        commentFilter.addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 1,null));
        crawlerRequestRecord.tagsCreator().resultTags().getCategoryTag().addKVTag("comment_filter_record", JSON.toJSONString(commentFilter));


        DevCrawlerController.builder()
                .triggerInfo(domain,domain,System.currentTimeMillis(),domain)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(domain))
                .consoleResultPipeline()// console output
//                .fileResultPipeline("D:\\chance\\log\\tets.log",true) // file output
                .requestRecord(crawlerRequestRecord)
                .build()
                .start();
    }

    /**
     * Debug variant: crawls a single article detail page (turnPageItem start request)
     * to exercise the item parser. Adds a Tag_Site_Info custom tag with value "k" —
     * presumably selecting the k.sina.com.cn page template; verify in the site script.
     */
    private static void itemUrl(){
//        String url = "https://k.sina.cn/article_7493343692_p1bea359cc00100wdhc.html?from=auto&subch=uauto";
        String url = "https://k.sina.com.cn/article_6145283913_v16e49974902001elxq.html";

        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, turnPageItem)
                .domain(domain)
                .httpUrl(url)
                .httpConfig(HttpConfig.me(domain))
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 7,null))  // 7-day window
                .releaseTime(System.currentTimeMillis())
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                .build();
        crawlerRequestRecord.tagsCreator().bizTags().addDomain(domain);
        crawlerRequestRecord.tagsCreator().bizTags().addSite(site);
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(Tag_Site_Info,"k");

        // Comment filter: key-dedup only here (date-range info intentionally commented out).
        CrawlerRecord commentFilter = new CrawlerRequestRecord();
        commentFilter.setFilter(key);
        commentFilter.addFilterInfo(FilterUtils.memoryFilterKeyInfo(StringUtils.joinWith("-","filter",domain,site,"queue")));
//        commentFilter.addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 1,null));
        crawlerRequestRecord.tagsCreator().resultTags().getCategoryTag().addKVTag("comment_filter_record", JSON.toJSONString(commentFilter));

        DevCrawlerController.builder()
                .triggerInfo(domain,domain,System.currentTimeMillis(),domain)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(domain))
                .consoleResultPipeline()// console output
//                .consoleResultPipeline("mysql")
//                .fileResultPipeline("D:\\chance\\log\\tets.log",true) // file output
                .requestRecord(crawlerRequestRecord)
                .build()
                .start();
    }
}
