package com.chance.cc.crawler.prod.command.job.domain.vm.autohome.realtime;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerJob;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.queue.crawler.CrawlerQueueConfig;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant;
import com.chance.cc.crawler.meta.core.bean.job.CrawlerScheduleJob;
import com.chance.cc.crawler.prod.command.job.domain.vm.autohome.AutoHomeCommonCrawlerSchedulerJob;
import org.apache.commons.lang3.StringUtils;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;

/**
 * Realtime crawl schedule job for AutoHome "chejiahao" (car-owner media channel) videos.
 *
 * <p>Running {@link #main(String[])} publishes the assembled {@link CrawlerJob} definition
 * to the meta service as a cron-triggered schedule job.
 *
 * @author lt
 * @version 1.0
 * @date 2020-12-16 16:42:40
 * @email okprog@sina.com
 */
public class AutoHomeWebCheVideoCrawlerScheduleJob extends AutoHomeCommonCrawlerSchedulerJob {

    /** Crawl-level segment used when composing request/result queue names. */
    private static final String CRAWLER_LEVEL = "realtime";

    /** Site segment used in queue names, the sync callback URL and biz tags. */
    private static final String SITE = "che_video";

    /**
     * Biz-tag key for the content type. Original note said article=1, video=2,
     * yet the value "3" is tagged below — TODO confirm the mapping for infotype=3.
     */
    private static final String INFO_TYPE_KEY = "info_type";

    /**
     * Entry point: publishes the schedule-job definition to the meta service.
     */
    public static void main(String[] args) {
        publishAutoHomeCrawlerScheduleJobInfo();
    }

    /**
     * Serializes the crawler job and publishes (adds or updates) it on the meta
     * service as a schedule job.
     *
     * @return the {@link CrawlerJob} that was published
     */
    public static CrawlerJob publishAutoHomeCrawlerScheduleJobInfo() {

        CrawlerJob autoHomeScheduler = autoHomeScheduler();

        // Wrap the crawler job as a schedule-job record for the meta service.
        CrawlerScheduleJob crawlerScheduleJob = new CrawlerScheduleJob();
        crawlerScheduleJob.setDomain(domain);
        crawlerScheduleJob.setCrawlerJob(JSON.toJSONString(autoHomeScheduler));
        crawlerScheduleJob.setJobType(CrawlerMetaConstant.ScheduleCrawlerJobType.crawler.enumVal());
        crawlerScheduleJob.setNote("汽车之家车家号视频实时采集任务");
        crawlerScheduleJob.setCrawlerKey(autoHomeScheduler.generateCrawlerKey());
        HttpPage httpPage = metaServiceCommand.addOrUpdateCrawlerScheduleJob(crawlerScheduleJob);
        System.out.println("发布任务：" + httpPage.getRawText());
        return autoHomeScheduler;
    }

    /**
     * Assembles the realtime crawler job: the redis request queue, the kafka and
     * redis result pipelines, the seed page request, and the mysql-sync support record.
     *
     * @return the assembled {@link CrawlerJob}
     */
    public static CrawlerJob autoHomeScheduler() {

        // Seed request record for the latest chejiahao video listing.
        CrawlerRequestRecord cheArticleCrawler = doCheArticleCrawler();

        // Request queue name: crawler-<domain>-che_video-realtime-queue
        String requestQueueName = StringUtils.joinWith("-", "crawler", domain, SITE, CRAWLER_LEVEL, "queue");

        // Result queue (redis list): crawler-<domain>-che_video-realtime-result-queue
        String resultQueueName = StringUtils.joinWith("-", "crawler", domain, SITE, CRAWLER_LEVEL, "result", "queue");
        CrawlerQueueConfig crawlerQueueConfig = new CrawlerQueueConfig(
                resultQueueName,
                CrawlerQueueConfig.Content.result,
                CrawlerQueueConfig.Storage.redis,
                CrawlerQueueConfig.Structure.list);

        // Callback record that syncs crawl results from the redis result queue into mysql.
        CrawlerRequestRecord syncCrawlerRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain + "_" + SITE + "_sync", turnPageItem)
                .httpUrl(metaServiceHttpPrefix + "/crawler/domain/common/api/v1/" + domain
                        + "/search/results/sync?resultQueue=" + resultQueueName + "&site=" + SITE)
                .requestLabelTag(supportCallback)
                .requestLabelTag(internalDownload)
                .build();

        CrawlerJob crawlerJob = CrawlerJob.builder()
                .triggerInfo(domain,
                        CrawlerMetaConstant.ScheduleJobTrigger_Cron,
                        System.currentTimeMillis(),
                        StringUtils.joinWith("-", SITE, CrawlerMetaConstant.ScheduleJobTriggerJob_Realtime))
                .crawlerRequestQueue(CrawlerMetaConstant.redisRequestQueue(requestQueueName))
                .kafkaResultPipeline("kafka", kafkaTopciForRealTimeJob, null)
                .queueResultPipeline("redis", crawlerQueueConfig)
                .crawlerJobThreadNumber(10)
                .requestRecord(cheArticleCrawler)
                .supportRecord(syncCrawlerRecord)
                .build();
        return crawlerJob;
    }

    /**
     * Builds the seed request record: a turn-page request against the chejiahao
     * listing (infotype=3) with a redis-backed date-range de-dup filter and the
     * domain/site/biz tags the downstream pipeline reads.
     *
     * @return the initial {@link CrawlerRequestRecord}
     */
    public static CrawlerRequestRecord doCheArticleCrawler() {

        // Single source of truth for the seed URL (also used as the record key).
        String startUrl = "https://chejiahao.autohome.com.cn/?infotype=3";

        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .recordKey(startUrl)
                .httpUrl(startUrl)
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(domain))
                .domain(domain)
                // De-dup via a redis filter keyed by domain, restricted to a date range
                // of 1 (unit not visible here — presumably days; verify in FilterUtils).
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.redisFilterKeyInfo(domain))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(1, null))
                .proxy(proxy)
                .build();

        requestRecord.tagsCreator().bizTags().addDomain(domain);
        requestRecord.tagsCreator().bizTags().addSite(SITE);
        requestRecord.tagsCreator().bizTags().addSiteBiz("hao_video");
        requestRecord.tagsCreator().bizTags().addCustomKV(INFO_TYPE_KEY, "3");
        return requestRecord;
    }
}
