package com.chance.cc.crawler.development.command.job.domain.pcauto;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerJob;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.queue.crawler.CrawlerQueueConfig;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.command.job.CrawlerJobCommand;
import com.chance.cc.crawler.development.command.schedule.CrawlerJobScheduleCommand;
import com.chance.cc.crawler.development.command.script.ScriptCommand;
import com.chance.cc.crawler.development.command.trigger.CrawlerJobTriggerCommand;
import com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant;
import com.chance.cc.crawler.meta.core.bean.job.CrawlerScheduleJob;
import org.apache.commons.lang3.StringUtils;
import org.junit.Test;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;

/**
 * @author lt
 * @version 1.0
 * @date 2021-01-12 17:57:15
 * @email okprog@sina.com
 */
public class PcAutoArticleKeywordsSyncSchedulerJob {

    /** Crawler domain this job belongs to. */
    private static final String DOMAIN = "pcauto";
    /** Level segment used when composing request/result queue names. */
    private static final String CRAWLER_LEVEL = "sync";

    /** Host and ports of the crawler development command services (dev environment). */
    private static final String COMMAND_HOST = "192.168.1.215";
    private static final int COMMAND_PORT = 9599;
    private static final int SCHEDULE_COMMAND_PORT = 9589;

    /** Crawler node the scheduled job is published to. */
    private static final String DEVICE_NAME = "crawler_dev_host";

    private static final CrawlerJobTriggerCommand crawlerJobTriggerCommand =
            new CrawlerJobTriggerCommand(COMMAND_HOST, COMMAND_PORT);
    private static final ScriptCommand scriptCommand =
            new ScriptCommand(COMMAND_HOST, COMMAND_PORT);
    private static final CrawlerJobScheduleCommand crawlerJobScheduleCommand =
            new CrawlerJobScheduleCommand(COMMAND_HOST, SCHEDULE_COMMAND_PORT);

    /**
     * Abuyun dynamic HTTP proxy configuration.
     * NOTE(review): this proxy is configured in the static block but never referenced
     * by any method in this class — confirm whether it is still needed.
     */
    private static final Proxy proxy = new Proxy();
    static {
        // SECURITY(review): proxy credentials are hard-coded in source control —
        // they should be moved to external configuration or a secrets store.
        proxy.setHost("http-dyn.abuyun.com");
        proxy.setPort(9020);
        proxy.setUsername("H5168QRFNIU3804D");
        proxy.setPassword("5F6B3610BB719FAA");
    }

    /**
     * Publishes, in order: the crawler job definition, its schedule trigger, and the
     * crawler node script for the pcauto domain.
     */
    public static void main(String[] args) {
        CrawlerJob crawlerJob = publishPcAutoCrawlerSchedulerJobInfo();
        publishPcAutoArticleKWCrawlerScheduleTriggerInfo(
                crawlerJob.generateCrawlerKey(), crawlerJob.generateCrawlerTriggerKey());
        publishCrawlerNodeScript();
    }

    /** Schedules the already-published job on the target device. */
    @Test
    public void pcAutoArticleKwJobScheduler() {
        crawlerJobScheduleCommand.publishCrawlerJob("pcauto-article_keywords-realtime-cron", DEVICE_NAME);
        // To force a re-publish, call republishCrawlerJob(...) with the same arguments.
    }

    /**
     * Builds the pcauto article-keyword crawler job and publishes it to the crawler
     * job command service as a scheduled crawler job.
     *
     * @return the job that was published; the caller uses it to derive trigger keys
     */
    public static CrawlerJob publishPcAutoCrawlerSchedulerJobInfo() {
        CrawlerJob pcAutoScheduler = pcAutoScheduler(DOMAIN);

        // Wrap the job (serialized as JSON) into a schedule-job envelope.
        CrawlerScheduleJob crawlerScheduleJob = new CrawlerScheduleJob();
        crawlerScheduleJob.setDomain(DOMAIN);
        crawlerScheduleJob.setCrawlerJob(JSON.toJSONString(pcAutoScheduler));
        crawlerScheduleJob.setJobType(CrawlerMetaConstant.ScheduleCrawlerJobType.crawler.enumVal());
        crawlerScheduleJob.setNote("太平洋新闻列表链接同步任务");
        crawlerScheduleJob.setCrawlerKey(pcAutoScheduler.generateCrawlerKey());

        CrawlerJobCommand crawlerJobCommand = new CrawlerJobCommand(COMMAND_HOST, COMMAND_PORT);
        HttpPage httpPage = crawlerJobCommand.addOrUpdateCrawlerScheduleJob(crawlerScheduleJob);
        System.out.println("发布任务：" + httpPage.getRawText());
        return pcAutoScheduler;
    }

    /**
     * Assembles the crawler job definition: the sitemap start request, the redis
     * request/result queues, and a support record whose callback URL syncs crawl
     * results downstream.
     *
     * @param domain crawler domain the job is registered under
     * @return the assembled crawler job
     */
    public static CrawlerJob pcAutoScheduler(String domain) {
        CrawlerRequestRecord keyWordCrawler = doArticleKeyWordSyncCrawler();
        String site = keyWordCrawler.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");

        // Queue names follow the "crawler-<domain>-<site>-<level>-..." convention.
        String requestQueueName =
                StringUtils.joinWith("-", "crawler", domain, site, CRAWLER_LEVEL, "queue");
        String resultQueueName =
                StringUtils.joinWith("-", "crawler", domain, site, CRAWLER_LEVEL, "result", "queue");
        CrawlerQueueConfig crawlerQueueConfig = new CrawlerQueueConfig(
                resultQueueName,
                CrawlerQueueConfig.Content.result,
                CrawlerQueueConfig.Storage.redis,
                CrawlerQueueConfig.Structure.list);

        // Support record: a callback request that posts results to the sync endpoint.
        // TODO(review): the start-page key "bitauto_hao_article_sync" looks copy-pasted
        // from a bitauto (易车号) job — confirm it is intentional for the pcauto domain.
        CrawlerRequestRecord syncCrawlerRecord = CrawlerRequestRecord.builder()
                .startPageRequest("bitauto_hao_article_sync", turnPageItem)
                .httpUrl("http://192.168.1.215:9599/crawler/domain/common/api/v1/pcauto/search/results/sync?resultQueue="
                        + resultQueueName + "&site=" + site)
                .requestLabelTag(supportCallback)
                .requestLabelTag(internalDownload)
                .build();

        return CrawlerJob.builder()
                .triggerInfo(domain,
                        CrawlerMetaConstant.ScheduleJobTrigger_Cron,
                        System.currentTimeMillis(),
                        StringUtils.joinWith("-", site, CrawlerMetaConstant.ScheduleJobTriggerJob_Realtime))
                .crawlerRequestQueue(CrawlerMetaConstant.redisRequestQueue(requestQueueName))
                .queueResultPipeline("", crawlerQueueConfig)
                .crawlerJobThreadNumber(1)
                .requestRecord(keyWordCrawler)
                .supportRecord(syncCrawlerRecord)
                .build();
    }

    /**
     * Builds the start request for the pcauto sitemap page, from which article-keyword
     * links are collected.
     *
     * @return the request record, tagged with the pcauto domain and the
     *         "article_keywords" site
     */
    public static CrawlerRequestRecord doArticleKeyWordSyncCrawler() {
        String site = "article_keywords";

        CrawlerRequestRecord keywordRecord = CrawlerRequestRecord.builder()
                .startPageRequest(DOMAIN, CrawlerEnum.CrawlerRequestType.turnPageItem)
                .domain(DOMAIN)
                .recordKey("https://www.pcauto.com.cn/sitemap/sitemap.html")
                .httpUrl("https://www.pcauto.com.cn/sitemap/sitemap.html")
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(DOMAIN))
                // Key-based de-duplication backed by redis.
                .filter(CrawlerEnum.CrawlerRecordFilter.key)
                .addFilterInfo(FilterUtils.redisFilterKeyInfo(DOMAIN))
                .needParsed(false)
                .needWashed(true)
                .build();

        keywordRecord.tagsCreator().bizTags().addDomain(DOMAIN);
        keywordRecord.tagsCreator().bizTags().addSite(site);
        return keywordRecord;
    }

    /**
     * Publishes the schedule trigger for the pcauto article-keyword job.
     *
     * @param crawlerKey        key of the published crawler job
     * @param crawlerTriggerKey trigger key derived from the published job
     */
    public static void publishPcAutoArticleKWCrawlerScheduleTriggerInfo(String crawlerKey, String crawlerTriggerKey) {
        HttpPage httpPage = crawlerJobTriggerCommand.addOrUpdateScheduleCrawlerTrigger(
                crawlerKey,
                crawlerTriggerKey,
                DOMAIN,
                CrawlerMetaConstant.ScheduleJobTrigger_Simple,
                "",
                "太平洋文章列表链接关键词"
        );

        System.out.println("添加采集调度信息：" + httpPage.getRawText());
        System.out.println("triggerKey: " + crawlerTriggerKey);
    }

    /** Publishes the crawler node script for the pcauto domain to consul. */
    public static void publishCrawlerNodeScript() {
        HttpPage httpPage = scriptCommand.publishCrawlerScript2Consul(DOMAIN);

        System.out.println("发布采集任务脚本：" + httpPage.getRawText());
    }

}
