package com.chance.cc.crawler.development.command.job.domain.kuaihe;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerJob;
import com.chance.cc.crawler.core.downloader.*;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.command.job.CrawlerJobCommand;
import com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant;
import com.chance.cc.crawler.meta.core.bean.job.CrawlerScheduleJob;
import org.apache.commons.lang3.StringUtils;

import java.nio.charset.StandardCharsets;

import static com.chance.cc.crawler.core.downloader.HttpRequestBody.ContentType.FORM;
import static com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant.ScheduleJobTriggerJob_Realtime;

/**
 * Builds and publishes the scheduled crawl job for the 1919 "KuaiHe" (快喝) storefront.
 *
 * @Author Zhao.Hhuan
 * @Date Create in 2021/1/22 17:31
 * @Description
 *      Defines the start request, request queue, and result pipeline for the
 *      kuaihe domain, then registers the job with the crawler command server.
 **/
public class KuaiHeCrawlerScheduleJob {

    /** Logical domain identifier the job is registered under. */
    public static final String domainId = "kuaihe";
    /** Entry URL used as the crawl's start request. */
    public static final String startUrl = "https://kuaihe-720.1919.cn/sys/init.do";
    // NOTE(review): hard-coded command-server address — consider moving to configuration.
    private static final CrawlerJobCommand crawlerJobCommand = new CrawlerJobCommand("192.168.1.215", 9599);

    public static void main(String[] args) {
        publishCrawlerScheduleJobInfo();
    }

    /**
     * Builds the KuaiHe crawler job and publishes it to the schedule-job command server.
     *
     * @return the {@link CrawlerJob} that was serialized (via fastjson) and published
     */
    public static CrawlerJob publishCrawlerScheduleJobInfo() {
        CrawlerJob kuaiheCrawlerSchedule = kuaiheCrawlerSchdule(domainId);

        // Publish the scheduled crawl job: the job definition travels as a JSON payload
        // inside a CrawlerScheduleJob envelope.
        CrawlerScheduleJob crawlerScheduleJob = new CrawlerScheduleJob();
        crawlerScheduleJob.setDomain(domainId);
        crawlerScheduleJob.setCrawlerJob(JSON.toJSONString(kuaiheCrawlerSchedule));
        crawlerScheduleJob.setJobType(CrawlerMetaConstant.ScheduleCrawlerJobType.crawler.enumVal());
        crawlerScheduleJob.setNote("1919快喝定時採集");
        crawlerScheduleJob.setCrawlerKey(kuaiheCrawlerSchedule.generateCrawlerKey());
        HttpPage page = crawlerJobCommand.addOrUpdateCrawlerScheduleJob(crawlerScheduleJob);
        System.out.println("发布crawler文章作业：" + page.getRawText());
        return kuaiheCrawlerSchedule;
    }

    /**
     * Assembles the {@link CrawlerJob} definition for the KuaiHe domain: a single POST
     * start request against {@link #startUrl}, plus redis queue, file result pipeline,
     * and a realtime simple trigger.
     *
     * <p>The method name keeps the historical "Schdule" spelling so existing callers
     * keep compiling.
     *
     * @param domainId domain the job is built for (intentionally shadows the class constant)
     * @return the fully built crawler job definition
     */
    public static CrawlerJob kuaiheCrawlerSchdule(String domainId) {
        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domainId, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(domainId)
                .httpUrl(startUrl)
                .httpConfig(HttpConfig.me(domainId))
                .filter(CrawlerEnum.CrawlerRecordFilter.key)
                .addFilterInfo(FilterUtils.redisFilterKeyInfo(String.join("-", "filter", domainId, "queue")))
                .releaseTime(System.currentTimeMillis())
                .build();
        // The init endpoint is hit with a form-encoded POST carrying an empty "data" field.
        // Encode the bytes explicitly as UTF-8 so they match the charset declared on the body
        // (the no-arg getBytes() would use the platform default charset).
        HttpRequest httpRequest = requestRecord.getHttpRequest();
        httpRequest.setMethod(HttpConstant.Method.POST);
        httpRequest.setRequestBody(HttpRequestBody.custom("data=".getBytes(StandardCharsets.UTF_8), FORM, "UTF-8"));

        return CrawlerJob.builder()
                .crawlerJobThreadNumber(20)
                .triggerInfo(
                        domainId,
                        CrawlerMetaConstant.ScheduleJobTrigger_Simple,
                        System.currentTimeMillis(),
                        ScheduleJobTriggerJob_Realtime)
                .crawlerRequestQueue(CrawlerMetaConstant.redisRequestQueue(StringUtils.joinWith("-", "crawler", domainId, "queue")))
                .fileResultPipeline(null, "/data/chance_crawler_test/data/kuaihe/kuaihe.log", false)
                .requestRecord(requestRecord)
                .build();
    }
}
