package com.chance.cc.crawler.prod.command.job.domain.news.baidu.baiduzhishu;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerJob;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant;
import com.chance.cc.crawler.meta.core.bean.job.CrawlerScheduleJob;
import com.chance.cc.crawler.prod.command.job.domain.news.NewsCommonScript;
import com.chance.cc.crawler.prod.command.job.domain.news.baidu.BaiDuCommonScript;
import org.apache.commons.lang3.StringUtils;

/**
 * Scheduler-job publisher for the Baidu Index ("zhishu") realtime crawl.
 *
 * <p>Builds a {@code CrawlerJob} describing the crawl (request record, keyword
 * support record, redis queue, kafka pipeline) and registers it with the meta
 * service as a cron-triggered schedule job.
 */
public class ZhiShuRealtimeCrawlerSchedulerJob extends BaiDuCommonScript {

    /**
     * Business domain identifier. Kept public and lowercase because it is part of
     * the class's external interface (used in generated crawler keys and queue names).
     */
    public static final String domain = "baidu";

    /** Crawl priority level; contributes to trigger and queue naming. */
    private static final String CRAWLER_LEVEL = "realtime";

    /** Site business segment; contributes to queue and filter-key naming. */
    private static final String SITE_BIZ = "realtime";

    /** Site identifier for Baidu Index (zhishu). */
    private static final String SITE = "zhishu";

    /** Seed page for the Baidu Index subject; also reused as the record's dedup key. */
    private static final String SUBJECT_URL = "http://index.baidu.com/v2/main/index.html#/subject/3454";

    public static void main(String[] args) {
        publishCrawlerScheduleJobInfo();
    }

    /**
     * Builds the crawler job and publishes it to the meta service as a schedule job.
     *
     * @return the {@code CrawlerJob} that was serialized into the published schedule job
     */
    public static CrawlerJob publishCrawlerScheduleJobInfo() {

        CrawlerJob crawlerJob = crawlerScheduler();

        // Publish the scheduled task to the meta service.
        CrawlerScheduleJob crawlerScheduleJob = new CrawlerScheduleJob();
        crawlerScheduleJob.setDomain(domain);
        crawlerScheduleJob.setCrawlerJob(JSON.toJSONString(crawlerJob));
        crawlerScheduleJob.setJobType(CrawlerMetaConstant.ScheduleCrawlerJobType.crawler.enumVal());
        crawlerScheduleJob.setNote("百度指数，每天凌晨2点实时采集任务");
        crawlerScheduleJob.setCrawlerKey(crawlerJob.generateCrawlerKey());

        HttpPage httpPage = metaServiceCommand.addOrUpdateCrawlerScheduleJob(crawlerScheduleJob);
        System.out.println("发布任务：" + httpPage.getRawText());

        return crawlerJob;
    }

    /**
     * Assembles the full {@code CrawlerJob}: the article request record, a keyword
     * support record fetched from the internal meta endpoint, the redis request
     * queue, the kafka result pipeline, and the cron trigger metadata.
     *
     * @return the fully configured crawler job
     */
    public static CrawlerJob crawlerScheduler() {

        // Article-collection seed record.
        CrawlerRequestRecord requestRecord = doSearchNewsCrawler();

        // Keyword list pulled from the internal meta service; downloaded internally
        // and tagged as a support source (not a crawl target itself).
        // NOTE(review): hard-coded internal IP — consider moving to configuration.
        CrawlerRequestRecord keywordRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPageItem)
                .httpUrl("http://192.168.1.217:9599/v1/meta/baidu/keys?site=zhishu")
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.supportSource)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();

        CrawlerJob crawlerJob = CrawlerJob.builder()
                .triggerInfo(domain,
                        CrawlerMetaConstant.ScheduleJobTrigger_Cron,
                        System.currentTimeMillis(),
                        // Trigger name becomes e.g. "zhishu-realtime-realtime";
                        // the domain prefix is appended by the framework itself.
                        StringUtils.joinWith("-", SITE, CRAWLER_LEVEL, CrawlerMetaConstant.ScheduleJobTriggerJob_Realtime))
                .crawlerRequestQueue(CrawlerMetaConstant.redisRequestQueue(StringUtils.joinWith("-", "crawler", domain, SITE, SITE_BIZ, CRAWLER_LEVEL, "queue")))
                .kafkaResultPipeline(null, NewsCommonScript.kafkaTopic, null)
                .crawlerJobThreadNumber(10) // worker thread count
                .requestRecord(requestRecord)
                .supportRecord(keywordRecord) // keyword support record
                .build();
        return crawlerJob;
    }

    /**
     * Builds the initial request record for the Baidu Index subject page, with a
     * 7-day date-range filter and redis-backed deduplication.
     *
     * @return the seed {@code CrawlerRequestRecord} tagged with domain/site/siteBiz
     */
    public static CrawlerRequestRecord doSearchNewsCrawler() {

        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage) // turnPage: paginated crawl
                .domain(domain)
                .httpUrl(SUBJECT_URL)
                .recordKey(SUBJECT_URL)
                .releaseTime(System.currentTimeMillis())
                .filter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange)
                // Only accept items released within the last 7 days (7 * 24 hours).
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(7 * 24, null))
                // Deduplicate against shared redis state (not local memory).
                .addFilterInfo(FilterUtils.redisFilterKeyInfo(StringUtils.joinWith("-", "filter", domain, SITE, SITE_BIZ, "queue")))
                .build();

        // URL template with a subject-id placeholder, consumed downstream.
        requestRecord.getHttpRequest().addExtra("url", "http://index.baidu.com/v2/main/index.html#/subject/%s");

        requestRecord.tagsCreator().bizTags().addDomain(domain);
        requestRecord.tagsCreator().bizTags().addSite(SITE);
        requestRecord.tagsCreator().bizTags().addSiteBiz(SITE_BIZ);

        return requestRecord;
    }

}
