package com.chance.cc.crawler.development.bootstrap.xcar.xCarVideoStart;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerJob;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.queue.crawler.CrawlerQueueConfig;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.controller.DevCrawlerController;
import com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant;
import org.apache.commons.lang3.StringUtils;
import org.junit.Test;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;

/**
 * Bootstrap entry points for the xcar (爱卡汽车) car-series video crawler.
 * Crawl entry URL: https://newcar.xcar.com.cn/1468/
 * 每日采集七日内文章、互动量、昨日全量回复
 * 采集板块：车系视频板块
 *
 * @author songding
 * @since 2021/8/18
 * @version 1.0
 **/
public class XCarVideoStart {
    private static final String DOMAIN = "xcar";
    private static final String SIDE = "Video";
    // Shared dynamic-IP proxy applied to every crawl request.
    // NOTE(review): proxy credentials are hard-coded in source control — move them
    // to external configuration or a secret store.
    private static final Proxy proxy = new Proxy();

    static {
        proxy.setHost("http-dyn.abuyun.com");
        proxy.setPort(9020);
        proxy.setUsername("H7307T4706B25G4D");
        proxy.setPassword("05B4877CC39192C0");
    }

    public static void main(String[] args) {
        // Full car-series video crawl; uncomment one of the variants below instead
        // to run a narrower collection:
        //xcarArticle();          // collect video article links only
        //articleInteraction();   // collect per-article interaction counts and comments
        //xcarArticleJob();       // build the job that persists accumulated links to MySQL
        xcarArticleAll();
    }

    /**
     * Builds the key-or-date-range filter record that is serialized to JSON and
     * attached to a request's business tags under {@code comment_record_filter_info},
     * so downstream comment collection can reconstruct the same filter.
     *
     * @param hours width of the date-range window, in hours
     * @return the configured filter record
     */
    private static CrawlerRecord commentFilterRecord(int hours) {
        CrawlerRecord filterRecord = new CrawlerRecord();
        filterRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange);
        filterRecord.addFilterInfo(FilterUtils.memoryFilterKeyInfo(DOMAIN));
        filterRecord.addFilterInfo(FilterUtils.dateRangeFilterInfo(hours, null));
        return filterRecord;
    }

    /**
     * Starts a full crawl of the car-series video board, seeded from the site root,
     * with series keywords fed in from the internal meta service. Collected article
     * links go to the kafka/redis console pipelines and to local log files.
     */
    public static void xcarArticleAll() {
        String url = "https://newcar.xcar.com.cn/";
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .startPageRequest(DOMAIN, CrawlerEnum.CrawlerRequestType.turnPage)
                .httpConfig(HttpConfig.me(DOMAIN))
                .httpUrl(url)
                .domain(DOMAIN)
                .proxy(proxy)
                .releaseTime(System.currentTimeMillis())
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                // 24-hour window; the class doc mentions a 7-day window — confirm which is intended.
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24, null))
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                // Toggle these to collect interactions/comments instead of article links:
                //.resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                //.resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                .build();
        record.tagsCreator().bizTags().addDomain(DOMAIN);
        record.tagsCreator().bizTags().addSite(SIDE);
        record.tagsCreator().bizTags().addSiteBiz(SIDE);
        record.tagsCreator().bizTags().addCustomKV("total", "total"); // enables link accumulation

        // TODO: keyword injection — support request that pulls car-series keywords
        // from the internal meta service; the host below must be reachable.
        CrawlerRequestRecord keywordRecord = CrawlerRequestRecord.builder()
                .startPageRequest(DOMAIN, turnPageItem)
                .httpUrl("http://192.168.1.217:9599/v1/meta/xcar/keys?site=carSeries")
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();

        // Serialized as JSON so the filter survives transport to the comment stage intact.
        record.tagsCreator().bizTags().addCustomKV("comment_record_filter_info",
                JSON.toJSONString(commentFilterRecord(24)));

        DevCrawlerController.builder()
                .triggerInfo(DOMAIN, DOMAIN, System.currentTimeMillis(), DOMAIN)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(DOMAIN))
                .consoleResultPipeline("kafka")
                .consoleResultPipeline("redis")
                .fileResultPipeline("redis", "F:\\chance_log\\xcar\\video\\video.log", false)
                .fileResultPipeline("kafka", "F:\\chance_log\\xcar\\video\\videoUrl.log", false)
                .requestRecord(record)
                .supportRecord(keywordRecord)
                .build("com.chance.cc.crawler.development.scripts.xcar")
                .start();
    }

    /**
     * Crawls video article links for one series board
     * (https://newcar.xcar.com.cn/21/) within a 24-hour window.
     * Same pipeline setup as {@link #xcarArticleAll()} but without the keyword
     * support request.
     */
    public static void xcarArticle() {
        String url = "https://newcar.xcar.com.cn/21/";
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .startPageRequest(DOMAIN, CrawlerEnum.CrawlerRequestType.turnPage)
                .httpConfig(HttpConfig.me(DOMAIN))
                .httpUrl(url)
                .domain(DOMAIN)
                .proxy(proxy)
                .releaseTime(System.currentTimeMillis())
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24, null))
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                // Toggle these to collect interactions/comments instead of article links:
                //.resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                //.resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                .build();
        record.tagsCreator().bizTags().addDomain(DOMAIN);
        record.tagsCreator().bizTags().addSite(SIDE);
        record.tagsCreator().bizTags().addSiteBiz(SIDE);
        record.tagsCreator().bizTags().addCustomKV("total", "total"); // enables link accumulation

        // Serialized as JSON so the filter survives transport to the comment stage intact.
        record.tagsCreator().bizTags().addCustomKV("comment_record_filter_info",
                JSON.toJSONString(commentFilterRecord(24)));

        DevCrawlerController.builder()
                .triggerInfo(DOMAIN, DOMAIN, System.currentTimeMillis(), DOMAIN)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(DOMAIN))
                .consoleResultPipeline("kafka")
                .consoleResultPipeline("redis")
                .fileResultPipeline("redis", "F:\\chance_log\\xcar\\video\\video.log", false)
                .fileResultPipeline("kafka", "F:\\chance_log\\xcar\\video\\videoUrl.log", false)
                .requestRecord(record)
                .build("com.chance.cc.crawler.development.scripts.xcar")
                .start();
    }

    /**
     * Crawls interaction counts and comments for one video article page
     * (7-day article window, 24-hour comment window). The page is washed
     * before extraction ({@code setNeedWashPage(true)}).
     * Renamed from {@code Article} to follow lowerCamelCase method naming.
     */
    private static void articleInteraction() {
        String url = "https://xtv.xcar.com.cn/show/202101/id_228121.html";
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .startPageRequest(DOMAIN, CrawlerEnum.CrawlerRequestType.turnPage)
                .httpConfig(HttpConfig.me(DOMAIN))
                .httpUrl(url)
                .domain(DOMAIN)
                .releaseTime(System.currentTimeMillis())
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 7, null)) // 7-day window
                //.resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                .proxy(proxy)
                .build();
        record.setNeedWashPage(true);
        record.tagsCreator().bizTags().addDomain(DOMAIN);
        record.tagsCreator().bizTags().addSite(SIDE);
        record.tagsCreator().bizTags().addSiteBiz(SIDE);

        // Serialized as JSON so the filter survives transport to the comment stage intact.
        record.tagsCreator().bizTags().addCustomKV("comment_record_filter_info",
                JSON.toJSONString(commentFilterRecord(24)));

        DevCrawlerController.builder()
                .triggerInfo(DOMAIN, DOMAIN, System.currentTimeMillis(), DOMAIN)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(DOMAIN))
                .consoleResultPipeline("kafka")
                //.consoleResultPipeline("redis")
                //.fileResultPipeline("redis","F:\\chance_log\\ak\\seriesArticle\\addArticleUrl.log",false)
                .fileResultPipeline("kafka", "F:\\chance_log\\ak\\seriesArticle\\videoAll.log", false)
                .requestRecord(record)
                .build("com.chance.cc.crawler.development.scripts.xcar")
                .start();
    }

    /**
     * Builds the scheduled job that accumulates video links over the past year
     * (24 * 30 * 12 hours) and routes them to MySQL (via the redis-backed result
     * queue) and to Kafka, with a sync callback to the internal search API.
     *
     * @return the configured crawler job, ready to be scheduled
     */
    public static CrawlerJob xcarArticleJob() {
        String url = "https://newcar.xcar.com.cn/21/";
        String sideInfo = "video";
        String sideBiz = "new_releaseTime";
        // ~1-year window so the accumulated link set covers the whole board.
        int windowHours = 24 * 30 * 12;
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .startPageRequest(DOMAIN, CrawlerEnum.CrawlerRequestType.turnPage)
                .httpConfig(HttpConfig.me(DOMAIN))
                .httpUrl(url)
                .domain(DOMAIN)
                .proxy(proxy)
                .releaseTime(System.currentTimeMillis())
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(windowHours, null))
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                // Toggle these to collect interactions/comments instead of article links:
                //.resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                //.resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                .build();
        record.tagsCreator().bizTags().addDomain(DOMAIN);
        record.tagsCreator().bizTags().addSite(SIDE);
        record.tagsCreator().bizTags().addSiteBiz(sideBiz);
        record.tagsCreator().bizTags().addCustomKV("total", "total"); // enables link accumulation
        record.tagsCreator().bizTags().addCustomKV("persistence", "persistence"); // persist into MySQL

        // Serialized as JSON so the filter survives transport to the comment stage intact.
        record.tagsCreator().bizTags().addCustomKV("comment_record_filter_info",
                JSON.toJSONString(commentFilterRecord(windowHours)));

        // Redis list that buffers each collected video link for the MySQL pipeline.
        String resultPostQueueName = "xcar_video_url";
        CrawlerQueueConfig crawlerQueueConfigName = new CrawlerQueueConfig(
                resultPostQueueName,
                CrawlerQueueConfig.Content.result,   // carries crawl results
                CrawlerQueueConfig.Storage.redis,    // staging storage
                CrawlerQueueConfig.Structure.list    // stored as a list
        );

        // Callback request that syncs each persisted video link to the search API.
        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .startPageRequest("xcar_push_result", CrawlerEnum.CrawlerRequestType.turnPageItem)
                .httpUrl("http://192.168.1.215:9599/crawler/domain/common/api/v1/" + DOMAIN + "/search/results/sync?"
                        + "resultQueue=crawler_result_queue_XCAR_video&site=" + SIDE)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.supportCallback)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();

        return CrawlerJob.builder()
                .triggerInfo(DOMAIN, CrawlerMetaConstant.ScheduleJobTrigger_Cron // cron-scheduled
                        , System.currentTimeMillis(), StringUtils.joinWith("-", SIDE, sideInfo,
                                sideBiz, CrawlerMetaConstant.ScheduleJobTriggerJob_Realtime))
                .queueResultPipeline("mysql", crawlerQueueConfigName)
                .kafkaResultPipeline("kafka", "news", null)
                .fileResultPipeline("redis", "F:\\chance_log\\ak\\seriesArticle\\UrlMYSQL.log", false)
                .requestRecord(record)
                .supportRecord(crawlerRequestRecord)
                .build();
    }

    /**
     * Demonstrates extracting the numeric id from a video page file name,
     * e.g. "id_228121.html" -> "228121".
     */
    @Test
    public void test() {
        String id = "id_228121.html";
        System.out.println(id.substring(3, id.length() - 5));
    }
}
