package com.chance.cc.crawler.prod.command.job.domain.news.tenxun.realtime;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerJob;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.queue.crawler.CrawlerQueueConfig;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant;
import com.chance.cc.crawler.meta.core.bean.job.CrawlerScheduleJob;
import com.chance.cc.crawler.prod.command.job.domain.news.tenxun.TXCommonScript;
import org.apache.commons.lang3.StringUtils;

import java.util.*;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.internalDownload;

/**
 * @author lt
 * @version 1.0
 * @date 2021-04-09 17:45:24
 * @email okprog@sina.com
 */
public class TXRealtimeCrawlerSchedulerJob extends TXCommonScript {
    /** Crawler-level segment used when composing queue and trigger names. */
    private static final String CRAWLER_LEVEL = "realtime";
    /** Site-business tag attached to produced records. */
    private static final String SITE_BIZ = "news-realtime";
    /** Site segment; public because other jobs reference it. */
    public static final String site = "news";
    /** Biz-tag key carrying the sports API token. */
    public static final String SPORTS_TOKEN = "sports_token";
    /** Biz-tag key carrying the sports channel-extension ids. */
    public static final String SPORTS_EXT = "sports_ext";

    public static void main(String[] args) {
//        publishCrawlerScheduleJobInfo();
        System.out.println(crawlerScheduler().generateCrawlerTriggerKey());
    }

    /**
     * Publishes the realtime crawl schedule job to the meta service.
     *
     * @return the {@link CrawlerJob} that was serialized and published
     */
    public static CrawlerJob publishCrawlerScheduleJobInfo(){

        CrawlerJob crawlerJob = crawlerScheduler();

        // Register the scheduled task with the meta service.
        CrawlerScheduleJob crawlerScheduleJob = new CrawlerScheduleJob();
        crawlerScheduleJob.setDomain(domain);
        crawlerScheduleJob.setCrawlerJob(JSON.toJSONString(crawlerJob));
        crawlerScheduleJob.setJobType(CrawlerMetaConstant.ScheduleCrawlerJobType.crawler.enumVal());
        crawlerScheduleJob.setNote("腾讯新闻板块每十分钟实时采集任务");
        crawlerScheduleJob.setCrawlerKey(crawlerJob.generateCrawlerKey());
        HttpPage httpPage = metaServiceCommand.addOrUpdateCrawlerScheduleJob(crawlerScheduleJob);
        System.out.println("发布任务：" + httpPage.getRawText());
        return crawlerJob;
    }

    /**
     * Assembles the realtime crawler job: request queue, result pipelines
     * (kafka + redis), the article seed record, a sync-callback record, and
     * comment de-duplication filter info.
     *
     * @return fully configured {@link CrawlerJob}
     */
    public static CrawlerJob crawlerScheduler(){

        // Article-collection seed record.
        CrawlerRequestRecord articleCrawler = doSearchNewsCrawler();

        // Request queue name, e.g. crawler-<domain>-news-realtime-queue.
        String requestQueueName = StringUtils.joinWith("-", "crawler", domain, site, CRAWLER_LEVEL, "queue");

        // Result queue configuration (redis list).
        String resultQueueName = StringUtils.joinWith("-", "crawler", domain, site, CRAWLER_LEVEL, "result", "queue");
        CrawlerQueueConfig crawlerQueueConfig = new CrawlerQueueConfig(
                resultQueueName,
                CrawlerQueueConfig.Content.result,
                CrawlerQueueConfig.Storage.redis,
                CrawlerQueueConfig.Structure.list);

        // Callback record that syncs results into mysql via the meta service.
        CrawlerRequestRecord syncCrawlerRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain + "_" + site +"_sync", turnPageItem)
                .httpUrl(metaServiceHttpPrefix + "/crawler/domain/common/api/v1/"+domain+"/search/results/sync?resultQueue="
                        +resultQueueName + "&site=" + site)
                .requestLabelTag(supportCallback)
                .requestLabelTag(internalDownload)
                .build();

        CrawlerJob crawlerJob = CrawlerJob.builder()
                .triggerInfo(domain,
                        CrawlerMetaConstant.ScheduleJobTrigger_Cron,
                        System.currentTimeMillis(),
                        StringUtils.joinWith("-", domain, site, CRAWLER_LEVEL, CrawlerMetaConstant.ScheduleJobTriggerJob_Realtime))
                // Reuse the name built above instead of rebuilding the same join inline.
                .crawlerRequestQueue(CrawlerMetaConstant.redisRequestQueue(requestQueueName))
                .kafkaResultPipeline("result_kafka", kafkaTopic, null)
                .queueResultPipeline("result_redis",crawlerQueueConfig)
                .crawlerJobThreadNumber(5)
                .requestRecord(articleCrawler)
                .supportRecord(syncCrawlerRecord)
                .build();
        // Comment de-duplication: key filter plus a 1-day date-range filter,
        // serialized into the record's biz tags for downstream use.
        CrawlerRecord filterCrawlerRecord = new CrawlerRecord();
        filterCrawlerRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.key);
        filterCrawlerRecord.addFilterInfo(FilterUtils.redisFilterKeyInfo(StringUtils.joinWith("-",filter,domain,site,"comment")));
        filterCrawlerRecord.addFilterInfo(FilterUtils.dateRangeFilterInfo(1,null));
        articleCrawler.tagsCreator().bizTags().addCustomKV("comment_record_filter_info", JSON.toJSONString(filterCrawlerRecord));
        crawlerJob.getScheduleTags().getCategoryTag().addLabelTag(CrawlerEnum.CrawlerDataType.comment.enumVal());
        return crawlerJob;
    }

    /**
     * Builds the initial seed record pointing at the Tencent news homepage,
     * with per-channel keywords and sports-API credentials in the biz tags.
     *
     * @return the seed {@link CrawlerRequestRecord}
     */
    public static CrawlerRequestRecord doSearchNewsCrawler(){

        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(domain)
                .recordKey("https://new.qq.com/")
                .httpUrl("https://new.qq.com/")
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(domain))
                // De-dup by key OR by date range (last 1 day).
                .filter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange)
                .addFilterInfo(FilterUtils.redisFilterKeyInfo(StringUtils.joinWith("-","filter",domain,site,"queue")))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(1,null))
                .proxy(proxy)
                .needParsed(true)
                .needWashed(false)
                .build();
        // Channels to crawl; carried in the http request extras for the parser.
        List<String> keywords = Arrays.asList("health","sports","auto");
        Map<String,Object> extras = new HashMap<>();
        extras.put("keywords",keywords);
        requestRecord.getHttpRequest().setExtras(extras);
        // NOTE(review): hardcoded API token in source — consider moving to
        // configuration/secret storage; rotating it requires a redeploy.
        requestRecord.tagsCreator().bizTags().addCustomKV(SPORTS_TOKEN,"8f6b50e1667f130c10f981309e1d8200");
        requestRecord.tagsCreator().bizTags().addCustomKV(SPORTS_EXT,"221,203,210,229,209,222,220,201");

        requestRecord.tagsCreator().bizTags().addDomain(domain);
        requestRecord.tagsCreator().bizTags().addSite(site);
        requestRecord.tagsCreator().bizTags().addSiteBiz(SITE_BIZ);
        return requestRecord;
    }
}
