package com.chance.cc.crawler.prod.command.job.domain.news.baidu.yidianzixun;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerJob;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant;
import com.chance.cc.crawler.meta.core.bean.job.CrawlerScheduleJob;
import com.chance.cc.crawler.prod.command.job.domain.news.NewsCommonScript;
import com.chance.cc.crawler.prod.command.job.domain.news.baidu.BaiDuCommonScript;
import org.apache.commons.lang3.StringUtils;

/**
 * Publishes the realtime (weekly-window) crawl schedule job for YiDianZiXun
 * articles discovered via Baidu site-restricted search ({@code si=yidianzixun.com}).
 *
 * <p>Run {@link #main(String[])} to serialize the {@link CrawlerJob} definition and
 * register it with the meta service as a cron-triggered schedule job.
 */
public class YiDianZiXunRealtimeCrawlerSchedulerJob extends BaiDuCommonScript {

    /** Logical crawl domain; also used as the start-page request domain. */
    public static final String domain = "baidu";

    /** Crawl cadence level, folded into queue names and the trigger name. */
    private static final String crawler_level = "realtime";

    /** Site-business segment for queue / filter key naming. */
    private static final String siteBiz = "realtime";

    /** Short site code for YiDianZiXun (一点资讯). */
    private static final String site = "YDZX";

    /** Number of worker threads the crawler job runs with. */
    private static final int JOB_THREAD_NUMBER = 10;

    /** Date-range filter window: 7 days, expressed in hours. */
    private static final int DATE_RANGE_HOURS = 7 * 24;

    /**
     * Baidu search seed URL restricted to yidianzixun.com. Used both as the start
     * request URL and as the record key, so it is defined exactly once.
     */
    private static final String START_URL =
            "https://www.baidu.com/s?ie=utf-8&f=8&rsv_bp=1&rsv_idx=2&tn=baiduhome_pg&wd=nike&ct=2097152&si=yidianzixun.com&rsv_spt=1&oq=%25E4%25BD%25A0%25E5%258F%25AF&rsv_pq=ee20d1a400086822&rsv_t=f4f8FMnYzwUdyaEUWNM9k%2FPCbVNSJwbS2G%2BSL1zWBm3Klv6VDJZhyToC2kkIdXGDs7qy&rqlang=cn&rsv_enter=1&rsv_dl=tb&rsv_btype=t&inputT=2580&rsv_sug3=7&rsv_sug1=7&rsv_sug7=100&rsv_jmp=slow";

    public static void main(String[] args) {
        publishCrawlerScheduleJobInfo();
    }

    /**
     * Builds the crawler job and publishes it to the meta service as a schedule job.
     *
     * @return the {@link CrawlerJob} that was serialized into the schedule job
     */
    private static CrawlerJob publishCrawlerScheduleJobInfo() {
        CrawlerJob crawlerJob = crawlerScheduler();

        // Publish the scheduled task to the meta service.
        CrawlerScheduleJob crawlerScheduleJob = new CrawlerScheduleJob();
        crawlerScheduleJob.setJobType(CrawlerMetaConstant.ScheduleCrawlerJobType.crawler.enumVal());
        crawlerScheduleJob.setCrawlerJob(JSON.toJSONString(crawlerJob));
        crawlerScheduleJob.setDomain(domain);
        crawlerScheduleJob.setNote("一点资讯文章板块   文章信息每周实时采集任务");
        crawlerScheduleJob.setCrawlerKey(crawlerJob.generateCrawlerKey());
        HttpPage httpPage = metaServiceCommand.addOrUpdateCrawlerScheduleJob(crawlerScheduleJob);
        System.out.println("发布任务：" + httpPage.getRawText());

        return crawlerJob;
    }

    /**
     * Assembles the {@link CrawlerJob}: cron trigger info, Redis request queue,
     * Kafka result pipeline, thread count, and the initial request record.
     *
     * @return the fully built crawler job definition
     */
    public static CrawlerJob crawlerScheduler() {
        // Article collection seed record.
        CrawlerRequestRecord requestRecord = doSearchNewsCrawler();

        CrawlerJob crawlerJob = CrawlerJob.builder()
                .triggerInfo(
                        domain,
                        CrawlerMetaConstant.ScheduleJobTrigger_Cron,
                        System.currentTimeMillis(),
                        StringUtils.joinWith("-", site, crawler_level, CrawlerMetaConstant.ScheduleJobTriggerJob_Realtime))
                .crawlerRequestQueue(CrawlerMetaConstant.redisRequestQueue(StringUtils.joinWith("-", "crawler", domain, site, siteBiz, crawler_level, "queue")))
                .kafkaResultPipeline(null, NewsCommonScript.kafkaTopic, null)
                //.fileResultPipeline(null,"/data/chance_crawler_runner/logs/node/baidu.log",true)
                .crawlerJobThreadNumber(JOB_THREAD_NUMBER) // worker thread count
                .requestRecord(requestRecord)
                .build();
        return crawlerJob;
    }

    /**
     * Builds the initial request record: a turn-page start request on the Baidu
     * search URL, with a 7-day date-range filter OR a Redis-backed key filter
     * for deduplication (shared across nodes, not local).
     *
     * @return the seed {@link CrawlerRequestRecord} with domain/site/siteBiz tags set
     */
    private static CrawlerRequestRecord doSearchNewsCrawler() {

        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage) // turnPage = paginated crawl
                .domain(domain)
                .httpUrl(START_URL)
                .recordKey(START_URL)
                .releaseTime(System.currentTimeMillis())
                .filter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(DATE_RANGE_HOURS, null))
                // Deduplicate against keys stored in Redis (cluster-wide, not node-local).
                .addFilterInfo(FilterUtils.redisFilterKeyInfo(StringUtils.joinWith("-", "filter", domain, site, siteBiz, "queue")))
                .build();

        requestRecord.tagsCreator().bizTags().addDomain(domain);
        requestRecord.tagsCreator().bizTags().addSite(site);
        requestRecord.tagsCreator().bizTags().addSiteBiz(siteBiz);

        return requestRecord;
    }
}
