package com.chance.cc.crawler.prod.command.job.domain.vm.dcdapp.adhoc;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerJob;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.queue.crawler.CrawlerQueueConfig;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant;
import com.chance.cc.crawler.meta.core.bean.job.CrawlerScheduleJob;
import com.chance.cc.crawler.prod.command.job.domain.vm.dcdapp.DCDAppCommonCrawlerSchedulerJob;
import org.apache.commons.lang3.StringUtils;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRecordFilter.dateRange;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Tag_Site_Info;

/**
 * @Author songding
 * @Date Create in 2021.09
 * @Description
 *      懂车帝 口碑 临时 — ad-hoc (temporary) crawler scheduler job for Dongchedi (懂车帝)
 *      word-of-mouth / owner-review ("口碑") content.
 **/
public class DcdappWebPraiseSimpleCrawlerSchedulerJob extends DCDAppCommonCrawlerSchedulerJob {
    private static final String crawler_level = "simple";
    private static final String metaSite = "sgmKw";//adhocKw  sgmKw

    // Lower bound of the praise date-range filter: 1629216000000 ms = 2021-08-18 00:00 (UTC+8),
    // matching the "20210818" start date encoded in site_info below. Was previously duplicated
    // as a bare magic number in two filter configurations.
    private static final long DATE_RANGE_START_MS = 1629216000000L;

    public static void main(String[] args) {
        publishCrawlerScheduleJobInfo();
    }

    /**
     * Builds the crawler job definition and publishes it to the meta service
     * as a scheduled crawl job.
     *
     * @return the {@link CrawlerJob} that was serialized and published
     */
    public static CrawlerJob publishCrawlerScheduleJobInfo(){
        CrawlerJob crawlerJob = crawlerSchedulejob();

        //发布定时采集作业 (publish the scheduled crawl job to the meta service)
        CrawlerScheduleJob crawlerScheduleJob = new CrawlerScheduleJob();
        crawlerScheduleJob.setDomain(domain);
        crawlerScheduleJob.setCrawlerJob(JSON.toJSONString(crawlerJob));
        crawlerScheduleJob.setJobType(CrawlerMetaConstant.ScheduleCrawlerJobType.crawler.enumVal());
        crawlerScheduleJob.setNote("懂车帝口碑临时采集");
        crawlerScheduleJob.setCrawlerKey(crawlerJob.generateCrawlerKey());
        HttpPage page = metaServiceCommand.addOrUpdateCrawlerScheduleJob(crawlerScheduleJob);
        System.out.println("发布crawler作业：" + page.getRawText());
        return crawlerJob;
    }

    /**
     * Assembles the {@link CrawlerJob} for the ad-hoc Dongchedi praise ("口碑") crawl:
     * a date-range-filtered start request, a keyword support record fed from the meta
     * service, and result pipelines (file + kafka). Commented-out lines are deliberate
     * toggle alternates kept for this ad-hoc job.
     *
     * <p>NOTE: the method name keeps its original (non-camelCase) spelling
     * {@code crawlerSchedulejob} for caller compatibility.
     *
     * @return the fully configured crawler job definition
     */
    public static CrawlerJob crawlerSchedulejob(){
        String url = "https://www.dcdapp.com/";
        String site = "praise";
        String siteBiz = "praise";
        String site_info = "sgm-20210818-20211104";

        // HTTP config for the start request, tagged with the target site.
        HttpConfig me = HttpConfig.me(domain);
        me.setHttpSite(site);

        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, turnPage)
                .domain(domain)
                .httpUrl(url)
                // FIX: pass the configured `me` (httpSite set above). Previously a fresh
                // HttpConfig.me(domain) was passed here while `me` was built and then
                // silently discarded, so the httpSite setting never took effect.
                .httpConfig(me)
//                .filter(key)
                .filter(dateRange)
//                .addFilterInfo(FilterUtils.redisFilterKeyInfo(StringUtils.joinWith("-","filter",domain,site,siteBiz,"queue")))
                // Only keep records released between 2021-08-18 and "now".
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(0, new long[]{DATE_RANGE_START_MS, System.currentTimeMillis()}))
                .releaseTime(System.currentTimeMillis())
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                .build();
        crawlerRequestRecord.tagsCreator().bizTags().addDomain(domain);
        crawlerRequestRecord.tagsCreator().bizTags().addSite(site);
        crawlerRequestRecord.tagsCreator().bizTags().addSiteBiz(siteBiz);
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(Tag_Site_Info,site_info);

        // Same date-range filter, serialized into a result tag so downstream comment
        // collection applies the identical window.
        CrawlerRecord commentFilter = new CrawlerRequestRecord();
        commentFilter.setFilter(dateRange);
        commentFilter.addFilterInfo(FilterUtils.dateRangeFilterInfo(0, new long[]{DATE_RANGE_START_MS, System.currentTimeMillis()}));
        crawlerRequestRecord.tagsCreator().resultTags().getCategoryTag().addKVTag("comment_filter_info", JSON.toJSONString(commentFilter));

        // Support record: pulls the series keyword list for `metaSite` from the meta service.
        CrawlerRequestRecord keywordRecord = CrawlerRequestRecord.builder()
                .startPageRequest("dcdapp_series_keyword",turnPageItem)
                .httpUrl("http://"+metaServiceIp+":"+metaServicePort+"/v1/meta/"+domain+"/keys?site="+metaSite)
//                .httpUrl("http://192.168.1.215:9599/v1/meta/dcdapp/keys?site=vmBmw")
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();

        //每条帖子的结果 (redis list queue holding per-post result URLs; used by the
        //commented-out queueResultPipeline toggle below)
        String resultPostQueueName = "dcdapp_praise_item_url";
        CrawlerQueueConfig crawlerPostQueueConfig = new CrawlerQueueConfig(
                resultPostQueueName,
                CrawlerQueueConfig.Content.result,
                CrawlerQueueConfig.Structure.list);
        //每一条帖子结果同步 (per-post result sync; currently disabled via the commented
        //supportRecord toggle below)
        // NOTE(review): "sina_post_result" looks like a copy-paste remnant from a sina job —
        // confirm the intended start-page name before re-enabling this record.
        CrawlerRequestRecord syncCrawlerPostRecord = CrawlerRequestRecord.builder()
                .startPageRequest("sina_post_result", turnPageItem)
                .httpUrl("http://"+metaServiceIp+":"+metaServicePort+"/crawler/domain/common/api/v1/"+domain+"/search/results/sync?resultQueue=" + resultPostQueueName + "&site=dcdapp_praise_item_url")
                .requestLabelTag(supportCallback)
                .requestLabelTag(internalDownload)
                .build();

        return CrawlerJob.builder()
                .crawlerJobThreadNumber(10)
                .triggerInfo(
                        domain,
                        CrawlerMetaConstant.ScheduleJobTrigger_Cron,
                        System.currentTimeMillis(),//dcdapp-praise-praise-simple-20210919-praise-realtime
                        StringUtils.joinWith("-",site,siteBiz,site_info,CrawlerMetaConstant.ScheduleJobTriggerJob_Realtime))//crawler-dcdapp-praise-praise-simple-20210919-simple-queue
                .crawlerRequestQueue(CrawlerMetaConstant.redisRequestQueue(StringUtils.joinWith("-","crawler",domain,site,siteBiz,"simple","queue")))
//                .queueResultPipeline("redis",crawlerPostQueueConfig)
                .fileResultPipeline("kafka", "/data/chance_crawler_runner/logs/node/dcdapp/dcdapp_praise_simple.log", false)
//                .fileResultPipeline("redis", "/data/chance_crawler_runner/logs/node/dcdapp/dcdapp_praise_simple_redis.log", false)
                .kafkaResultPipeline("kafka",kafkaTopicForTraceJob,null)
                .requestRecord(crawlerRequestRecord)
                .supportRecord(keywordRecord)
//                .supportRecord(syncCrawlerPostRecord)
                .build();
    }
}
