package com.chance.cc.crawler.development.bootstrap.douyin;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.controller.DevCrawlerController;
import com.chance.cc.crawler.development.scripts.douyin.api.DYAccountCrawlerScript;
import com.chance.cc.crawler.development.scripts.douyin.api.DYSearchCrawlerScript;
import org.apache.commons.lang3.StringUtils;

import java.io.IOException;
import java.util.List;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;
import static com.chance.cc.crawler.development.bootstrap.douyin.DYTest.commentImport;

/**
 * Local bootstrap for exercising the Douyin ("api-dy") crawler scripts during
 * development.
 *
 * <p>Each {@code test*} method wires a {@link DevCrawlerController} for one
 * crawl scenario (keyword search, account, comment list, device registration).
 * Exactly one scenario runs from {@link #main(String[])}; toggle the commented
 * calls there to switch scenarios.
 */
public class DouyinStart {

    /** Logical crawler domain shared by every scenario in this harness. */
    private static final String domainId = "api-dy";

    /**
     * Outbound proxy shared by the search and account scenarios.
     * NOTE(review): proxy credentials are hard-coded in source; consider moving
     * them to configuration or environment variables before sharing this file.
     */
    private static final Proxy proxy = createProxy();

    /** Builds the Abuyun dynamic-IP proxy configuration. */
    private static Proxy createProxy() {
        Proxy p = new Proxy();
        p.setHost("http-dyn.abuyun.com");
        p.setPort(9020);
        p.setUsername("HL89Q19E86E2987D");
        p.setPassword("71F33D94CE5F7BF2");
        return p;
    }

    public static void main(String[] args) throws IOException {
        testDeviceRegister();
        // Alternative scenarios — enable one at a time:
//        testSearch();
//        testAccount();
//        testCustomAccount();
//        System.out.println(JSON.toJSONString(proxy));
    }

    /**
     * Support record that fetches Douyin user OAuth infos from the internal
     * meta service. Identical wiring is needed by both the search and the
     * account scenarios, so it is built here once.
     */
    private static CrawlerRequestRecord oauthInfoRecord() {
        return CrawlerRequestRecord.builder()
                .startPageRequest("dy_oauth_infos", turnPageItem)
                .httpUrl("http://192.168.1.215:9599/crawler/oauth/api/v1/douyin/userOauthInfos")
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();
    }

    /** Keyword-search scenario: crawls search results for the last 7 days. */
    private static void testSearch(){
        // Primary search request; download/pipeline are disabled because the
        // script drives its own paging and output.
        CrawlerRequestRecord searchRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domainId, CrawlerEnum.CrawlerRequestType.turnPage)
                .httpUrl(DYSearchCrawlerScript.searchStartUrl)
                .httpHead("User-Agent", "com.ss.android.ugc.aweme/150301 (Linux; U; Android 8.0.0; zh_CN; MI 5s; Build/OPR1.170623.032; Cronet/TTNetVersion:539f4bcf 2021-01-18 QuicVersion:47946d2a 2020-10-14)")
                .httpHead("Connection", "Keep-Alive")
                .httpHead("Host", "search3-search-lf.amemv.com")
                .httpHead("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")
                .releaseTime(System.currentTimeMillis())
                .needWashed(false)
                .needParsed(false)
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24*7,null))  // last 7 days (hours)
                .proxy(proxy)
                .build();
        searchRecord.setDownload(false);
        searchRecord.setSkipPipeline(true);
        searchRecord.tagsCreator().bizTags().addDomain(domainId);
        searchRecord.tagsCreator().bizTags().addSite(DYSearchCrawlerScript.site);
        searchRecord.tagsCreator().bizTags().addCustomKV("job_flag","daily");

        // Serialized comment filter, passed to the script as a custom tag so
        // derived comment records inherit the same 7-day date-range filter.
        CrawlerRecord commentFilter = new CrawlerRecord();
        commentFilter.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange);
        commentFilter.addFilterInfo(FilterUtils.dateRangeFilterInfo(24*7,null));
        searchRecord.tagsCreator().bizTags().addCustomKV("comment_record_filter_info", JSON.toJSONString(commentFilter));

        // Support record seeding search keywords from the internal meta service.
        String queueName = StringUtils.joinWith("-","crawler",domainId,DYSearchCrawlerScript.site,"queue");
        CrawlerRequestRecord keywordSeedRecord = CrawlerRequestRecord.builder()
                .startPageRequest("dy-search",turnPageItem)
                .httpUrl("http://192.168.1.215:9599/v1/meta/douyin/keys?site=kw")
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();

        CrawlerRequestRecord oauthRecord = oauthInfoRecord();

        DevCrawlerController controller = DevCrawlerController.builder()
                .crawlerThreadNum(1)
                .triggerInfo(domainId, domainId, System.currentTimeMillis(), domainId)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(queueName))
                .consoleResultPipeline()
                .fileResultPipeline("D:\\chance\\log\\dy\\dy_article.log", false)
                .requestRecord(searchRecord)
                .supportRecord(keywordSeedRecord)
                .supportRecord(oauthRecord)
                .build();
        controller.start();
    }

    /** Account scenario: crawls posts of configured accounts. */
    private static void testAccount(){

        // NOTE(review): this range (2021-01-01 .. 2022-01-01 UTC, ms) is never
        // read below — confirm whether it was meant to feed a filter.
        long[] dateRange = {1609430400000L, 1640966400000L};

        CrawlerRequestRecord accountRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domainId, CrawlerEnum.CrawlerRequestType.turnPage)
                .httpUrl(DYAccountCrawlerScript.accountUrl)
                .httpHead("User-Agent", "com.ss.android.ugc.aweme/140101 (Linux; U; Android 8.0.0; zh_CN; MI 5s; Build/OPR1.170623.032; Cronet/TTNetVersion:1c8b77ac 2020-12-16 QuicVersion:47946d2a 2020-10-14)")
                .releaseTime(System.currentTimeMillis())
                .needWashed(false)
                .needParsed(false)
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24*7+3,null))  // 7 days + 3h slack
                .proxy(proxy)
                .build();
        accountRecord.setDownload(false);
        accountRecord.setSkipPipeline(true);
        accountRecord.tagsCreator().bizTags().addDomain(domainId);
        accountRecord.tagsCreator().bizTags().addSite(DYAccountCrawlerScript.site);

        // Comment filter forwarded via tag; NOTE(review): 25+3 is hours (28h),
        // unlike the article filter's 24*7+3 — confirm this asymmetry is intended.
        CrawlerRecord commentFilter = new CrawlerRecord();
        commentFilter.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange);
        commentFilter.addFilterInfo(FilterUtils.dateRangeFilterInfo(25+3,null));
        accountRecord.tagsCreator().bizTags().addCustomKV("comment_record_filter_info", JSON.toJSONString(commentFilter));

        // Support record seeding the account list from the internal meta service.
        CrawlerRequestRecord accountSeedRecord = CrawlerRequestRecord.builder()
                .startPageRequest("dy-account",turnPageItem)
                .httpUrl("http://192.168.1.215:9599/v1/meta/douyin/keys?site=api_account")
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();

        CrawlerRequestRecord oauthRecord = oauthInfoRecord();

        DevCrawlerController controller = DevCrawlerController.builder()
                .crawlerThreadNum(1)
                .triggerInfo(domainId, domainId, System.currentTimeMillis(), domainId)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(StringUtils.joinWith("-", "crawler", domainId, DYAccountCrawlerScript.site, "queue")))
                .fileResultPipeline("D:\\chance\\log\\dy\\dy_account.log", false)
                .requestRecord(accountRecord)
                .supportRecord(accountSeedRecord)
                .supportRecord(oauthRecord)
                .build();
        controller.start();
    }

    /** Comment scenario: replays imported comment requests through the controller. */
    private static void testComment() throws IOException {

        List<CrawlerRequestRecord> importedRecords = commentImport();

        // NOTE(review): this record is built and tagged but never handed to the
        // controller below (the first imported record is used instead) — confirm
        // whether it is dead code or should replace importedRecords.get(0).
        CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domainId, CrawlerEnum.CrawlerRequestType.turnPage)
                .httpUrl("https://api5-normal-c-lf.amemv.com/aweme/v2/comment/list/")
                .releaseTime(System.currentTimeMillis())
                .needWashed(false)
                .needParsed(false)
                .build();
        commentRecord.setDownload(false);
        commentRecord.setSkipPipeline(true);
        commentRecord.tagsCreator().bizTags().addDomain(domainId);
        commentRecord.tagsCreator().bizTags().addSite("api-comment");

        DevCrawlerController controller = DevCrawlerController.builder()
                .crawlerThreadNum(1)
                .triggerInfo(domainId, domainId, System.currentTimeMillis(), domainId)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue("crawler-api-dy-api-comment-queue"))
                .consoleResultPipeline()
                .requestRecord(importedRecords.get(0))
                .build();
        controller.start();
    }

    /** Device-registration scenario: runs the device-register script with a fixed identity. */
    private static void testDeviceRegister() throws IOException {
        // Pseudo-URL dispatched by the script package, not fetched over HTTP
        // directly (download is disabled below).
        CrawlerRequestRecord deviceRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domainId, CrawlerEnum.CrawlerRequestType.turnPage)
                .httpUrl("https://start/dy/devices/register")
                .releaseTime(System.currentTimeMillis())
                .needWashed(false)
                .needParsed(true)
                .build();
        deviceRecord.setDownload(false);
        deviceRecord.setSkipPipeline(true);
        deviceRecord.tagsCreator().bizTags().addDomain(domainId);
        deviceRecord.tagsCreator().bizTags().addSite("api-device");
        // Fixed device identity consumed by the register script.
        deviceRecord.tagsCreator().bizTags().addCustomKV("domain_result_json",
                "{\"serialno\": \"38a0d83b4ac2d515\", \"android\": \"2d51538a0d83b4ac\", \"mac\": \"08:00:27:ef:c9:e4\"}");

        // NOTE(review): reuses the comment queue name for the device scenario —
        // confirm this is intentional and not a copy/paste leftover.
        DevCrawlerController controller = DevCrawlerController.builder()
                .crawlerThreadNum(1)
                .triggerInfo(domainId, domainId, System.currentTimeMillis(), domainId)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue("crawler-api-dy-api-comment-queue"))
                .consoleResultPipeline()
                .requestRecord(deviceRecord)
                .build("com.chance.cc.crawler.development.scripts.douyin.api");
        controller.start();
    }
}
