package com.chance.cc.crawler.development.bootstrap.bitauto;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.*;
import com.chance.cc.crawler.core.downloader.http.HttpClientDownloader;
import com.chance.cc.crawler.core.downloader.http.HttpClientGenerator;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.downloader.proxy.SiteConfigProxyProvider;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.controller.DevCrawlerController;
import org.apache.commons.lang3.StringUtils;
import org.junit.Test;

import java.util.HashMap;
import java.util.Map;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;

/**
 * @author lt
 * @version 1.0
 * @date 2020-12-07 09:50:13
 * @email okprog@sina.com
 */
public class BitAutoStart {
    // Crawled domain identifier; used in queue/trigger/job names and biz tags throughout.
    private static final String domain = "bitauto";
    // Crawl-level segment used when composing redis queue names (see initCommentRecord).
    private static final String crawler_level = "trace";
    // Custom biz-tag key consumed by the forum job (see doForumCrawler).
    private static final String OrderType = "order_type";

    // Shared outbound proxy applied to every crawl request record built below.
    private static Proxy proxy = new Proxy();
    static {
        // Proxy configuration (abuyun dynamic HTTP proxy).
        // SECURITY NOTE(review): credentials are hard-coded here (with older key
        // pairs left in the comments below) — move them to external configuration.
        //H5168QRFNIU3804D
        //5F6B3610BB719FAA
        proxy.setHost("http-dyn.abuyun.com");
        proxy.setPort(9020);
        proxy.setUsername("HL89Q19E86E2987D");
        proxy.setPassword("71F33D94CE5F7BF2");
    }

    /**
     * Dev entry point: builds the bitauto crawl jobs (article/video/forum/hao/
     * praise/dealer) plus a few support records, wires the selected ones into a
     * {@link DevCrawlerController}, and starts it. Most jobs are commented out;
     * uncomment the ones to run locally.
     */
    public static void main(String[] args) {
        // Enable these TLS/SSL protocol versions for https access.
        System.setProperty("https.protocols", "TLSv1,TLSv1.1,TLSv1.2,SSLv3");
        // Keyword source record: fetches series keywords from the internal meta service.
        // NOTE(review): the URL contains a double slash ("9599//v1") — presumably
        // tolerated by the server, but worth confirming.
        CrawlerRequestRecord keywordCrawlerRecord = CrawlerRequestRecord.builder()
                .startPageRequest("bitauto_keywords",turnPageItem)
                .httpUrl("http://192.168.1.215:9599//v1/meta/"+domain+"/keys?site=series")
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();
        CrawlerRequestRecord indexRecord = CrawlerRequestRecord.builder()
                .startPageRequest("bitauto_index",turnPageItem)
                .httpUrl("https://www.bitauto.com/")
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();
        CrawlerRequestRecord cityRecord = CrawlerRequestRecord.builder()
                .startPageRequest("bitauto_city",turnPageItem)
                .httpUrl("https://cmsapi.bitauto.com/city/getcity.ashx?requesttype=json&bizCity=1")
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();

        // Article crawl
        CrawlerRequestRecord articleCrawler = doArticleCrawler();
        // Video crawl
        CrawlerRequestRecord videoCrawler = doVideoCrawler();
        CrawlerRequestRecord videoArticleCrawler = doVideoArticleCrawler();
        // Forum - latest release crawl
        CrawlerRequestRecord forumReleaseCrawler = doForumReleaseCrawler();
        // Forum - latest reply crawl
        CrawlerRequestRecord forumReplyCrawler = doForumReplyCrawler();
        CrawlerRequestRecord forumCrawler = doForumCrawler();
        // Yiche-hao - latest article crawl
        CrawlerRequestRecord haoArticleCrawler = doHaoArticleCrawler();

        // Yiche-hao - traced comment/reply crawl
        CrawlerRequestRecord haoArticleCrawlerComment = doHaoArticleCommentTraceCrawler();
        CrawlerRequestRecord initCommentRecord = initCommentRecord(haoArticleCrawlerComment);

        // Praise (word-of-mouth owner review) crawl
        CrawlerRequestRecord praiseCrawler = doPraiseCrawler();
        // Dealer crawl
        CrawlerRequestRecord dealerArticleCrawler = doDealerArticleCrawler();

        DevCrawlerController devCrawlerController = DevCrawlerController.builder()
                .triggerInfo(domain, domain + "_trigger", System.currentTimeMillis(), domain + "_job")
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(domain)) // in-memory queue
                .consoleResultPipeline() // console output
//                .fileResultPipeline("file_result", "D:\\chance\\data\\bitauto\\bitauto_forum.json", false)
                //.requestRecord(articleCrawler)  //more job
               .requestRecord(videoCrawler)
                ///.requestRecord(videoArticleCrawler)
//                .requestRecord(forumReleaseCrawler)
//                .requestRecord(forumReplyCrawler)
//                .requestRecord(forumCrawler)
//                .requestRecord(haoArticleCrawler)
//                .requestRecord(haoArticleCrawler)
//                .requestRecord(praiseCrawler)
//                .requestRecord(dealerArticleCrawler)
//                .supportRecord(keywordCrawlerRecord)
//                .supportRecord(indexRecord)
//                .supportRecord(cityRecord)
//                .crawlerThreadNum(20)
                .build("com.chance.cc.crawler.development.scripts.bitauto");
        // Add comment de-duplication filter info (kept for reference):
//        CrawlerRecord filterCrawlerRecord = new CrawlerRecord();
//        filterCrawlerRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange);
//        filterCrawlerRecord.addFilterInfo(FilterUtils.memoryFilterKeyInfo(StringUtils.joinWith("-",filter,domain,"comment")));
//        filterCrawlerRecord.addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 30,null));
//        articleCrawler.tagsCreator().bizTags().addCustomKV("comment_record_filter_info", JSON.toJSONString(filterCrawlerRecord));
//        devCrawlerController.getCrawlerJob().getScheduleTags().getCategoryTag().addLabelTag(CrawlerEnum.CrawlerDataType.comment.enumVal());
        devCrawlerController.start();

//        String site = haoArticleCrawlerComment.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
//        String requestQueueName = StringUtils.joinWith("-","crawler",domain, site, crawler_level,"queue");
//
//        // Configure the result queue info
//        String resultQueueName = StringUtils.joinWith("-","crawler",domain, site, crawler_level,"result","queue");
//        CrawlerQueueConfig crawlerQueueConfig = new CrawlerQueueConfig(
//                resultQueueName,
//                CrawlerQueueConfig.Content.result,
//                CrawlerQueueConfig.Storage.redis,
//                CrawlerQueueConfig.Structure.list);
//
//        // Callback record that syncs results to mysql
//        CrawlerRequestRecord syncCrawlerRecord = CrawlerRequestRecord.builder()
//                .startPageRequest("bitauto_hao_article_sync", turnPageItem)
//                .httpUrl("http://192.168.1.215:9599/crawler/domain/common/api/v1/bitauto/search/results/sync?resultQueue="+resultQueueName)
//                .requestLabelTag(supportCallback)
//                .requestLabelTag(internalDownload)
//                .build();
//        DevCrawlerController.builder()
//                .triggerInfo(domain,domain + "_trigger",System.currentTimeMillis(),domain+ "_job")
//                .crawlerRequestQueue(CrawlerMetaConstant.redisRequestQueue(requestQueueName))
//                .consoleResultPipeline()
//                .fileResultPipeline("D:\\chance\\data\\bitauto\\bitauto_hao.json",true)
//                .queueResultPipeline("redis_result",crawlerQueueConfig)
//                .requestRecord(haoArticleCrawler)
//                .supportRecord(syncCrawlerRecord)
//                .build();
//
//        DevCrawlerController.builder()
//                .triggerInfo(domain,domain + "_trigger",System.currentTimeMillis(),domain+ "_job")
//                .crawlerRequestQueue(CrawlerMetaConstant.redisRequestQueue(requestQueueName))
//                .consoleResultPipeline()
////                .fileResultPipeline("D:\\chance\\data\\bitauto\\bitauto_hao_comment.json",true)
//                .requestRecord(haoArticleCrawlerComment)
//                .supportRecord(initCommentRecord)
//                .build();
    }

    /**
     * Wraps the given trace-crawl start record into a support request that POSTs
     * an item-page "init" record (tagged comment + interaction) to the internal
     * queue-init endpoint, pre-populating the comment crawl request queue.
     *
     * @param startRequestRecord trace-crawl start record whose biz tags supply the site name
     * @return support request record carrying the serialized init record as a JSON POST body
     */
    public static CrawlerRequestRecord initCommentRecord(CrawlerRequestRecord startRequestRecord){
        // Item-page record that will be serialized into the POST body.
        CrawlerRequestRecord seedRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(startRequestRecord)
                .httpUrl("https://hao.yiche.com/")
                .recordKey("https://hao.yiche.com/")
                .releaseTime(System.currentTimeMillis())
                .needWashed(true)
                .needParsed(true)
                .resultLabelTag(comment)
                .resultLabelTag(interaction)
                .copyBizTags()
                .build();

        // Compose the redis request-queue name from domain, site and crawl level.
        String site = startRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        String queueName = StringUtils.joinWith("-","crawler",domain, site, crawler_level,"queue");

        CrawlerRequestRecord initRequest = CrawlerRequestRecord.builder()
                .startPageRequest("bitauto_hao_article_comment",turnPageItem)
                .httpUrl("http://192.168.1.215:9599/crawler/domain/common/api/v1/bitauto/results/queue/init?requestQueueName="+queueName)
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();

        // Attach the seed record as a UTF-8 JSON POST body.
        HttpRequest httpRequest = initRequest.getHttpRequest();
        httpRequest.setMethod(HttpConstant.Method.POST);
        httpRequest.setRequestBody(HttpRequestBody.json(JSON.toJSONString(seedRecord), "utf-8"));
        return initRequest;
    }


    /**
     * Builds the start request for the news-article crawl job: seeds a single
     * article page, dedupes by record key or a 24-hour date window, and routes
     * traffic through the shared proxy.
     */
    public static CrawlerRequestRecord doArticleCrawler(){
        final String site = "article";
        final String seedUrl = "https://news.yiche.com/xinchexiaoxi/20211026/0915028274.html";

        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(domain)
                .recordKey(seedUrl)
                .httpUrl(seedUrl)
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(domain))
                .filter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(domain))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24, null))
                .proxy(proxy)
                .build();

        record.tagsCreator().bizTags().addDomain(domain);
        record.tagsCreator().bizTags().addSite(site);
        return record;
    }

    /**
     * Builds the start request for the video crawl job: seeds one vplay detail
     * page and pre-populates request extras with a list/item URL pair consumed
     * by the downstream parsing script.
     */
    public static CrawlerRequestRecord doVideoCrawler(){
        final String site = "video";
        final String seedUrl = "https://vc.yiche.com/vplay/2906268.html";

        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(domain)
                .recordKey(seedUrl)
                .httpUrl(seedUrl)
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(domain))
                .filter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(domain))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24*30,null))
                .proxy(proxy)
                .build();

        // Extras consumed by the site script. NOTE(review): the "listUrl" value
        // "11" looks like a dev placeholder — confirm against the video script.
        Map<String, Object> requestExtras = new HashMap<>();
        requestExtras.put("listUrl", "11");
        requestExtras.put("itemUrl", "https://vc.yiche.com/vplay/2927589.html#no_date");
        record.getHttpRequest().setExtras(requestExtras);

        record.tagsCreator().bizTags().addDomain(domain);
        record.tagsCreator().bizTags().addSite(site);
        return record;
    }


    /**
     * Builds a single-item request for one video detail page (turn-page item),
     * tagged article + interaction, with washing disabled and list/item URL
     * extras set for the parsing script.
     */
    public static CrawlerRequestRecord doVideoArticleCrawler(){
        final String site = "video";
        final String itemUrl = "https://vc.yiche.com/vplay/2225872.html#no_date";

        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .startPageRequest(domain, turnPageItem)
                .domain(domain)
                .httpUrl(itemUrl)
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(domain))
                .filter(CrawlerEnum.CrawlerRecordFilter.key)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(domain))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24*40,null))
                .resultLabelTag(article)
                .resultLabelTag(interaction)
                .needWashed(false)
                .proxy(proxy)
                .build();

        record.tagsCreator().bizTags().addDomain(domain);
        record.tagsCreator().bizTags().addSite(site);

        Map<String, Object> requestExtras = new HashMap<>();
        requestExtras.put("listUrl", "lastRequestUrl");
        requestExtras.put("itemUrl", itemUrl);
        record.getHttpRequest().setExtras(requestExtras);
        return record;
    }

    /**
     * Builds the start request for the forum latest-release crawl job.
     * Download, parse, wash and the result pipeline are all disabled: the
     * record acts purely as a seed for the site script to expand.
     */
    public static CrawlerRequestRecord doForumReleaseCrawler(){
        final String site = "forum_release";
        final String seedUrl = "https://www.bitauto.com/";

        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .recordKey(seedUrl)
                .httpUrl(seedUrl)
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(domain))
                .domain(domain)
                .filter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(domain))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24, null))
                .proxy(proxy)
                .needParsed(false)
                .needWashed(false)
                .build();
        record.setDownload(false);
        record.setSkipPipeline(true);

        record.tagsCreator().bizTags().addDomain(domain);
        record.tagsCreator().bizTags().addSite(site);
        return record;
    }

    /**
     * Builds the start request for the forum latest-reply crawl job; the seed
     * itself is neither downloaded nor piped anywhere — it only triggers the
     * site script.
     * NOTE(review): the filter is dateRange yet a memory key-filter info entry
     * is also registered — presumably unused; confirm against FilterUtils.
     */
    public static CrawlerRequestRecord doForumReplyCrawler(){
        final String site = "forum_reply";
        final String seedUrl = "https://www.bitauto.com/";

        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .recordKey(seedUrl)
                .httpUrl(seedUrl)
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(domain))
                .domain(domain)
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(domain))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24, null))
                .proxy(proxy)
                .needParsed(false)
                .needWashed(false)
                .build();
        record.setDownload(false);
        record.setSkipPipeline(true);

        record.tagsCreator().bizTags().addDomain(domain);
        record.tagsCreator().bizTags().addSite(site);
        return record;
    }

    /**
     * Builds the start request for the combined forum crawl job. In addition
     * to the standard domain/site tags it carries order_type=1 and a site-biz
     * tag; the seed itself is neither downloaded nor piped.
     */
    public static CrawlerRequestRecord doForumCrawler(){
        final String site = "forum";
        final String seedUrl = "https://www.bitauto.com/";

        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .recordKey(seedUrl)
                .httpUrl(seedUrl)
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(domain))
                .domain(domain)
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(domain))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24, null))
                .proxy(proxy)
                .needParsed(false)
                .needWashed(false)
                .build();
        record.setDownload(false);
        record.setSkipPipeline(true);

        record.tagsCreator().bizTags().addCustomKV(OrderType, "1");
        record.tagsCreator().bizTags().addDomain(domain);
        record.tagsCreator().bizTags().addSite(site);
        record.tagsCreator().bizTags().addSiteBiz(site);
        return record;
    }

    /**
     * Builds the start request for the yiche-hao latest-article crawl job.
     * The "#file" URL fragment plus the "filePath" custom tag point the site
     * script at a local URL list file.
     * NOTE(review): filePath is a machine-specific Windows path — move it to
     * external configuration.
     */
    public static CrawlerRequestRecord doHaoArticleCrawler(){
        final String site = "hao_article";
        final String seedUrl = "https://hao.yiche.com/#file";

        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .recordKey(seedUrl)
                .httpUrl(seedUrl)
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(domain))
                .domain(domain)
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(domain))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24*60,null))
                .proxy(proxy)
                .build();

        record.tagsCreator().bizTags().addDomain(domain);
        record.tagsCreator().bizTags().addSite(site);
        record.tagsCreator().bizTags().addSiteBiz(site);
        record.tagsCreator().bizTags().addCustomKV("filePath","C:\\Users\\Administrator\\Documents\\chance\\chance-crawler-development\\crawler-dev-bootstrap\\src\\main\\java\\com\\chance\\cc\\crawler\\development\\bootstrap\\bitauto\\url.txt");
        return record;
    }

    /**
     * Builds the start request for the yiche-hao comment-trace crawl job; the
     * "#comment" URL fragment routes the seed to the comment-tracing script.
     */
    public static CrawlerRequestRecord doHaoArticleCommentTraceCrawler(){
        final String site = "hao_article";
        final String seedUrl = "https://hao.yiche.com/#comment";

        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .recordKey(seedUrl)
                .httpUrl(seedUrl)
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(domain))
                .domain(domain)
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(domain))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24, null))
                .proxy(proxy)
                .build();

        record.tagsCreator().bizTags().addDomain(domain);
        record.tagsCreator().bizTags().addSite(site);
        return record;
    }

    /**
     * Builds the start request for the praise (owner word-of-mouth review)
     * crawl job, deduped by a 7-day date window; the seed is neither
     * downloaded nor piped — it only triggers the site script.
     */
    public static CrawlerRequestRecord doPraiseCrawler(){
        final String site = "praise";
        final String seedUrl = "https://www.bitauto.com/";

        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .recordKey(seedUrl)
                .httpUrl(seedUrl)
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(domain))
                .domain(domain)
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24*7,null))
                .proxy(proxy)
                .needParsed(false)
                .needWashed(false)
                .build();
        record.setDownload(false);
        record.setSkipPipeline(true);

        record.tagsCreator().bizTags().addDomain(domain);
        record.tagsCreator().bizTags().addSite(site);
        return record;
    }

    /**
     * Builds the start request for the dealer crawl job; the seed is neither
     * downloaded nor piped — it only triggers the dealer site script.
     */
    public static CrawlerRequestRecord doDealerArticleCrawler(){
        final String site = "dealer";
        final String seedUrl = "https://dealer.yiche.com/";

        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(domain)
                .recordKey(seedUrl)
                .httpUrl(seedUrl)
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(domain))
                .filter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(domain))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24, null))
                .proxy(proxy)
                .needParsed(false)
                .needWashed(false)
                .build();

        record.setDownload(false);
        record.setSkipPipeline(true);
        record.tagsCreator().bizTags().addDomain(domain);
        record.tagsCreator().bizTags().addSite(site);
        return record;
    }

    /**
     * Manual/dev test: downloads the dealer left-tree JSONP endpoint through
     * the configured abuyun proxy and prints the parsed JSON payload.
     *
     * <p>NOTE(review): performs live network I/O and depends on the hard-coded
     * proxy credentials above still being valid.
     */
    @Test
    public void testDealer(){
        HttpConfig httpConfig = HttpConfig.me("test");
        httpConfig.setProxy(proxy);
        httpConfig.setUseProxy(true);

        HttpClientDownloader downloader = new HttpClientDownloader();
        downloader.setClientGenerator(new HttpClientGenerator());
        downloader.setProxyProvider(new SiteConfigProxyProvider());

        String url = "https://apicar.bitauto.com/CarInfo/getlefttreejson.ashx?tagtype=jingxiaoshang&citycode=tianjin&cityid=2601";
        HttpRequest httpRequest = new HttpRequest();
        httpRequest.setUrl(url);
        HttpPage httpPage = downloader.download(httpRequest, httpConfig);
        String rawText = httpPage.getRawText();

        // The endpoint returns JSONP: callback({...}). Strip the wrapper before
        // parsing, but guard against a plain-JSON or error response with no
        // parentheses (the previous indexOf/substring combination threw
        // StringIndexOutOfBoundsException in that case).
        int start = rawText.indexOf('(');
        int end = rawText.lastIndexOf(')');
        String json = (start >= 0 && end > start) ? rawText.substring(start + 1, end) : rawText;

        // Parse straight to JSONObject instead of the former raw-typed
        // Map -> toJSONString -> parseObject round trip.
        JSONObject jsonObject = JSON.parseObject(json);
        System.out.println(jsonObject);
    }
}
