package com.chance.cc.crawler.development.command.job.domain.weibo;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerJob;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.command.job.CrawlerJobCommand;
import com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant;
import com.chance.cc.crawler.meta.core.bean.job.CrawlerScheduleJob;
import org.apache.commons.lang3.StringUtils;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.turnPage;
import static com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant.ScheduleJobTriggerJob_Realtime;

/**
 * @ClassName WeiboTieCrawlerScheduleJob
 * @Description Publishes a one-shot Weibo post ("tie") crawler schedule job to the
 * crawler admin endpoint. Run {@code main} to build and submit the job definition.
 * @Author songding
 * @Date 2021/10/27 10:23
 * @Version 1.0
 **/
public class WeiboTieCrawlerScheduleJob {

    /** Domain identifier shared by the schedule-job record and the request biz tags. */
    public static final String domainId = "weibo";
    /** Site identifier ("tie" = posts) attached to the request biz tags. */
    public static final String site = "tie";

    // Command client for the crawler admin service.
    // NOTE(review): host/port are hard-coded to a LAN address — consider externalizing to config.
    private static final CrawlerJobCommand crawlerJobCommand = new CrawlerJobCommand("192.168.1.215", 9599);

    public static void main(String[] args) {
        publishCrawlerScheduleJobInfo();
    }

    /**
     * Builds the Weibo crawler job and publishes it as a scheduled crawl job
     * via the crawler job command endpoint.
     *
     * @return the {@link CrawlerJob} that was serialized and published
     */
    public static CrawlerJob publishCrawlerScheduleJobInfo() {
        CrawlerJob weiboSchedule = weiboCrawlerSchdule();

        // Publish the scheduled crawl job; the job definition travels as JSON inside the record.
        CrawlerScheduleJob crawlerScheduleJob = new CrawlerScheduleJob();
        crawlerScheduleJob.setDomain(domainId);
        crawlerScheduleJob.setCrawlerJob(JSON.toJSONString(weiboSchedule));
        crawlerScheduleJob.setJobType(CrawlerMetaConstant.ScheduleCrawlerJobType.crawler.enumVal());
        crawlerScheduleJob.setNote("微博帖子单次采集");
        crawlerScheduleJob.setCrawlerKey(weiboSchedule.generateCrawlerKey());

        HttpPage page = crawlerJobCommand.addOrUpdateCrawlerScheduleJob(crawlerScheduleJob);
        System.out.println("发布作业：" + page.getRawText());
        return weiboSchedule;
    }

    /**
     * Assembles the one-shot Weibo post crawler job: a single seed request that
     * is neither downloaded, washed, nor parsed (it only kicks off the pipeline),
     * plus the queue, trigger, and result-pipeline configuration.
     *
     * <p>Method name intentionally kept as {@code weiboCrawlerSchdule} ("Schdule"
     * typo) — it is public API and external callers may reference it.
     *
     * @return the fully configured {@link CrawlerJob}
     */
    public static CrawlerJob weiboCrawlerSchdule() {
        String sourceUrl = "https://s.weibo.com/?Refer=";
        String siteBiz = "simple-id3-20211112";

        // Seed request: marked not-to-download and skip-pipeline so it only
        // triggers the crawl, producing no stored/parsed content itself.
        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domainId, turnPage)
                .httpUrl(sourceUrl)
                .releaseTime(System.currentTimeMillis())
                .needWashed(false)
                .needParsed(false)
                .build();
        requestRecord.setDownload(false);
        requestRecord.setSkipPipeline(true);
        requestRecord.tagsCreator().bizTags().addDomain(domainId);
        requestRecord.tagsCreator().bizTags().addSite(site);
        requestRecord.tagsCreator().bizTags().addSiteBiz(siteBiz);

        // Include the site in the queue name so consumers pick up only the
        // addresses belonging to this crawl.
        String requestQueueName = "crawler-weibo-tie-simple-queue";

        return CrawlerJob.builder()
                .crawlerJobThreadNumber(30)
                .triggerInfo(
                        domainId,
                        CrawlerMetaConstant.ScheduleJobTrigger_Simple,
                        System.currentTimeMillis(),
                        StringUtils.joinWith("-", domainId, site, siteBiz, ScheduleJobTriggerJob_Realtime))
                .crawlerRequestQueue(CrawlerMetaConstant.redisRequestQueue(requestQueueName))
                .fileResultPipeline(null, "/data/chance_crawler_test/logs/node/tie.log", false)
                .kafkaResultPipeline(null, "tmp_social_media_weibo", null)
                .requestRecord(requestRecord)
                .build();
    }
}