package com.chance.cc.crawler.development.command.job.domain.kaoyan;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerJob;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.command.job.CrawlerJobCommand;
import com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant;
import com.chance.cc.crawler.meta.core.bean.job.CrawlerScheduleJob;
import org.apache.commons.lang3.StringUtils;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.turnPage;

/**
 * @Author Zhao.Hhuan
 * @Date Create in 2020/12/11 13:20
 * @Description
 *      kaoyan
 **/
public class KaoyanCrawlerScheduleJob {

    /** Domain identifier shared by every kaoyan-forum crawler job. */
    public static final String domainId = "kaoyan";
    /** Forum listing sorted by post time — entrance for the new-article job. */
    public static final String articleEntranceUrl = "http://bbs.kaoyan.com/forum.php?mod=forumdisplay&fid=67&filter=author&orderby=dateline";
    /** Forum listing sorted by last-reply time — entrance for the reply/interaction job. */
    public static final String interactionOrCommentEntranceUrl = "http://bbs.kaoyan.com/forum.php?mod=forumdisplay&fid=67&filter=lastpost&orderby=lastpost";
    // Client for the crawler admin service that stores schedule jobs.
    // NOTE(review): host/port are hard-coded to a test machine — consider externalizing.
    private static final CrawlerJobCommand crawlerJobCommand = new CrawlerJobCommand("192.168.1.215", 9599);

    /** Publishes both kaoyan schedule jobs (by newest post, and by newest reply). */
    public static void main(String[] args) {
        publishKaoyanArticleCrawlerScheduleJobInfo();
        publishKaoyanInteractionOrCommentCrawlerScheduleJobInfo();
    }

    /**
     * Publishes the schedule job that crawls threads ordered by newest post time.
     *
     * @return the {@link CrawlerJob} that was serialized and published
     */
    public static CrawlerJob publishKaoyanArticleCrawlerScheduleJobInfo() {
        return publishScheduleJob(
                kaoyanCrawlerArticleSchdule(domainId),
                "考研论坛根据最新发帖时间定时采集",
                "根据最新发帖时间发布crawler文章作业：");
    }

    /**
     * Publishes the schedule job that crawls threads ordered by newest reply time.
     *
     * @return the {@link CrawlerJob} that was serialized and published
     */
    public static CrawlerJob publishKaoyanInteractionOrCommentCrawlerScheduleJobInfo() {
        return publishScheduleJob(
                kaoyanCrawlerInteractionOrCommentSchdule(domainId),
                "考研论坛根据最新回复时间定时采集",
                "根据最新回复时间发布crawler作业：");
    }

    /**
     * Wraps a {@link CrawlerJob} in a {@link CrawlerScheduleJob} envelope and pushes it
     * to the crawler admin service. Shared by both publish entry points.
     *
     * @param job       the fully-built crawler job to publish
     * @param note      human-readable description stored with the schedule entry
     * @param logPrefix prefix printed before the service's raw response
     * @return {@code job}, unchanged, for caller convenience
     */
    private static CrawlerJob publishScheduleJob(CrawlerJob job, String note, String logPrefix) {
        CrawlerScheduleJob crawlerScheduleJob = new CrawlerScheduleJob();
        crawlerScheduleJob.setDomain(domainId);
        crawlerScheduleJob.setCrawlerJob(JSON.toJSONString(job));
        crawlerScheduleJob.setJobType(CrawlerMetaConstant.ScheduleCrawlerJobType.crawler.enumVal());
        crawlerScheduleJob.setNote(note);
        crawlerScheduleJob.setCrawlerKey(job.generateCrawlerKey());
        HttpPage page = crawlerJobCommand.addOrUpdateCrawlerScheduleJob(crawlerScheduleJob);
        System.out.println(logPrefix + page.getRawText());
        return job;
    }

    /**
     * Builds the crawler job for new articles (threads ordered by post time).
     * Uses key-or-date-range filtering so already-seen URLs are skipped.
     *
     * @param domainId crawler domain identifier
     * @return the assembled {@link CrawlerJob}
     */
    public static CrawlerJob kaoyanCrawlerArticleSchdule(String domainId) {
        return buildScheduleJob(
                domainId,
                articleEntranceUrl,
                CrawlerEnum.CrawlerRecordFilter.keyOrDateRange,
                "post",
                "/data/chance_crawler_test/data/kaoyan/kaoyan_post.log");
    }

    /**
     * Builds the crawler job for replies/interactions (threads ordered by last reply).
     * Uses date-range-only filtering — presumably because reply pages revisit the same
     * URLs, so key-based dedup would suppress them; TODO confirm intent.
     *
     * @param domainId crawler domain identifier
     * @return the assembled {@link CrawlerJob}
     */
    public static CrawlerJob kaoyanCrawlerInteractionOrCommentSchdule(String domainId) {
        return buildScheduleJob(
                domainId,
                interactionOrCommentEntranceUrl,
                CrawlerEnum.CrawlerRecordFilter.dateRange,
                "reply",
                "/data/chance_crawler_test/data/kaoyan/kaoyan_reply.log");
    }

    /**
     * Assembles a turn-page crawler job from the parts that differ between the
     * article and reply variants. Everything else (redis dedup info, 24-hour
     * date window, request queue, kafka topic) is identical for both.
     *
     * @param domainId      crawler domain identifier
     * @param entranceUrl   listing page the crawl starts from
     * @param filter        record filter strategy for deduplication
     * @param orderTag      "post" or "reply"; stored as a biz tag and used in the trigger name
     * @param resultLogPath file path for the file result pipeline
     * @return the assembled {@link CrawlerJob}
     */
    private static CrawlerJob buildScheduleJob(String domainId,
                                               String entranceUrl,
                                               CrawlerEnum.CrawlerRecordFilter filter,
                                               String orderTag,
                                               String resultLogPath) {
        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domainId, turnPage)
                // No recordKey() call: the URL itself is the dedup key by default.
                .httpUrl(entranceUrl)
                .releaseTime(System.currentTimeMillis())
                .filter(filter)
                .addFilterInfo(FilterUtils.redisFilterKeyInfo(domainId))
                // 24-hour lookback window for the date-range filter.
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24, null))
                .build();

        requestRecord.tagsCreator().bizTags().addCustomKV("order", orderTag);

        return CrawlerJob.builder()
                .triggerInfo(
                        domainId,
                        CrawlerMetaConstant.ScheduleJobTrigger_Cron,
                        System.currentTimeMillis(),
                        StringUtils.joinWith("-", orderTag, CrawlerMetaConstant.ScheduleJobTriggerJob_Realtime))
                .crawlerRequestQueue(CrawlerMetaConstant.redisRequestQueue(StringUtils.joinWith("-", "crawler", domainId, "queue")))
                .fileResultPipeline(null, resultLogPath, false)
                .kafkaResultPipeline(null, "tmp_news", null)
                .requestRecord(requestRecord)
                .build();
    }
}
