package com.chance.cc.crawler.prod.command.job.domain.vm.autohome.geely;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerJob;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant;
import com.chance.cc.crawler.meta.core.bean.job.CrawlerScheduleJob;
import com.chance.cc.crawler.prod.command.job.domain.vm.autohome.AutoHomeCommonCrawlerSchedulerJob;
import org.apache.commons.lang3.StringUtils;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Tag_Site_Info;

/**
 * 吉利项目 汽车之家 论坛 发布时间
 */
/**
 * Geely project — Autohome forum crawler, ordered by release time.
 * <p>
 * Builds a realtime {@code CrawlerJob} for the Autohome forum (Geely business line,
 * {@code order_type=2} = sort by release time) and publishes it to the meta service
 * as a cron-triggered schedule job.
 */
public class AutoHomeForumGeelyReleaseCrawlerSchedulerJob extends AutoHomeCommonCrawlerSchedulerJob {
    /** Crawl priority level; used in trigger names and the Redis queue key. */
    private static final String crawler_level = "realtime";
    /** Target site type within the Autohome domain. */
    private static final String site = "forum";
    /** Business-line tag: Geely project. */
    private static final String siteBiz = "geely";
    /** Meta-service site key; intentionally the same value as the business tag. */
    private static final String metaSite = siteBiz;

    /** Tag key for result ordering: "1" = sort by reply time, "2" = sort by release time. */
    private static final String OrderType = "order_type";

    public static void main(String[] args) {
        publishAutoHomeCrawlerScheduleJobInfo();
    }

    /**
     * Builds the crawler job and publishes it to the meta service as a schedule job.
     *
     * @return the {@link CrawlerJob} definition that was published
     */
    public static CrawlerJob publishAutoHomeCrawlerScheduleJobInfo(){
        CrawlerJob autoHomeScheduler = autoHomeScheduler();

        // Wrap the job definition (serialized as JSON) in a schedule-job record
        // and push it to the meta service.
        CrawlerScheduleJob crawlerScheduleJob = new CrawlerScheduleJob();
        crawlerScheduleJob.setDomain(domain);
        crawlerScheduleJob.setCrawlerJob(JSON.toJSONString(autoHomeScheduler));
        crawlerScheduleJob.setJobType(CrawlerMetaConstant.ScheduleCrawlerJobType.crawler.enumVal());
        crawlerScheduleJob.setNote("汽车之家论坛临时采集任务");
        // The crawler key derived from the job uniquely identifies it for add-or-update.
        crawlerScheduleJob.setCrawlerKey(autoHomeScheduler.generateCrawlerKey());
        HttpPage httpPage = metaServiceCommand.addOrUpdateCrawlerScheduleJob(crawlerScheduleJob);
        System.out.println("发布任务：" + httpPage.getRawText());
        return autoHomeScheduler;
    }

    /**
     * Assembles the full crawler job: keyword source record, forum request record,
     * queue/pipeline wiring, and a date-range dedup filter for comments.
     *
     * @return the assembled {@link CrawlerJob}
     */
    public static CrawlerJob autoHomeScheduler(){
        // Keyword source record: fetched from the meta service for this site/business.
        CrawlerRequestRecord keywordCrawlerRecord = CrawlerRequestRecord.builder()
                .startPageRequest("autohome_keywords",turnPageItem)
                .httpUrl(metaServiceHttpPrefix+"/v1/meta/"+domain+"/keys?site=" + metaSite)
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();

        // Forum crawl request record.
        CrawlerRequestRecord forumCrawler = doForumCrawler();

        // NOTE(review): "jeely" in the log path looks like a typo for "geely" — confirm
        // against the deployed log layout before renaming; the path is live configuration.
        // NOTE(review): kafkaTopciForRealTimeJob ("Topci") is an inherited field name and
        // cannot be corrected in this class alone.
        CrawlerJob crawlerJob = CrawlerJob.builder()
                .triggerInfo(domain,
                        CrawlerMetaConstant.ScheduleJobTrigger_Cron,
                        System.currentTimeMillis(),
                        StringUtils.joinWith("-", site,siteBiz, crawler_level, CrawlerMetaConstant.ScheduleJobTriggerJob_Realtime))
                .crawlerRequestQueue(CrawlerMetaConstant.redisRequestQueue(StringUtils.joinWith("-", "crawler", domain, site, crawler_level, "queue")))
                .fileResultPipeline(null, "/data/chance_crawler_runner/logs/node/auto/autohome_jeely_realtime.log", false)
                .kafkaResultPipeline(null, kafkaTopciForRealTimeJob, null)
                .crawlerJobThreadNumber(3)
                .requestRecord(forumCrawler)
                .supportRecord(keywordCrawlerRecord)
                .build();

        // Comment dedup: Redis-backed date-range filter (24 + 2 hours; presumably one day
        // plus a 2-hour safety overlap — TODO confirm the intended window).
        CrawlerRecord filterCrawlerRecord = new CrawlerRecord();
        filterCrawlerRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange);
        filterCrawlerRecord.addFilterInfo(FilterUtils.redisFilterKeyInfo(StringUtils.joinWith("-",filter,domain,"comment")));
        filterCrawlerRecord.addFilterInfo(FilterUtils.dateRangeFilterInfo(24 + 2,null));
        forumCrawler.tagsCreator().bizTags().addCustomKV("comment_record_filter_info", JSON.toJSONString(filterCrawlerRecord));
        crawlerJob.getScheduleTags().getCategoryTag().addLabelTag(CrawlerEnum.CrawlerDataType.comment.enumVal());
        return crawlerJob;
    }

    /**
     * Builds the forum crawl request record: the Autohome start page, a 7-day (+2h)
     * date-range dedup filter, and business tags (site, Geely, release-time ordering).
     * The record itself is neither downloaded nor piped — it only seeds the crawl.
     *
     * @return the configured {@link CrawlerRequestRecord}
     */
    public static CrawlerRequestRecord doForumCrawler(){
        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain,CrawlerEnum.CrawlerRequestType.turnPage)
                .recordKey("https://www.autohome.com.cn/")
                .httpUrl("https://www.autohome.com.cn/")
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(domain))
                .domain(domain)
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.redisFilterKeyInfo(domain))
                // 7 days plus a 2-hour safety overlap.
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 7 + 2,null))
                .proxy(proxy)
                .needParsed(false)
                .needWashed(false)
                .build();
        // Seed record only: skip downloading and skip result pipelines.
        requestRecord.setDownload(false);
        requestRecord.setSkipPipeline(true);

        requestRecord.tagsCreator().bizTags().addDomain(domain);
        requestRecord.tagsCreator().bizTags().addSite(site);
        requestRecord.tagsCreator().bizTags().addSiteBiz(siteBiz);
        requestRecord.tagsCreator().bizTags().addCustomKV(Tag_Site_Info,siteBiz);
        // "2" = sort by release time (the defining behavior of this job).
        requestRecord.tagsCreator().bizTags().addCustomKV(OrderType,"2");
        return requestRecord;
    }
}
