package com.chance.cc.crawler.development.bootstrap.taobao;

import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.controller.DevCrawlerController;
import org.apache.commons.lang3.StringUtils;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;

/**
 * @author bx
 * @date 2020/11/18 0018 18:13
 */
public class TaobaoSearchHtmlStart {

    /** Taobao search endpoint hit by the start-page request. */
    private static final String searchUrl = "https://s.taobao.com/search";

    /** Crawl domain identifier, also used to derive queue and filter names. */
    public static final String domainId = "taobao";

    // Proxy configuration (abuyun dynamic HTTP proxy).
    // NOTE(review): credentials were previously hard-coded; they are now read from
    // system properties, falling back to the original values for compatibility.
    // Prefer launching with -Dcrawler.proxy.username=... so secrets stay out of source.
    private static final Proxy proxy = buildProxy();

    /** Builds the HTTP proxy used for all search requests. */
    private static Proxy buildProxy() {
        Proxy p = new Proxy();
        p.setHost(System.getProperty("crawler.proxy.host", "http-dyn.abuyun.com"));
        p.setPort(Integer.getInteger("crawler.proxy.port", 9020));
        p.setUsername(System.getProperty("crawler.proxy.username", "HEW657EL99F83S9D"));
        p.setPassword(System.getProperty("crawler.proxy.password", "8916B1F3F10B1979"));
        return p;
    }

    /**
     * Builds the start-page request against the Taobao search endpoint:
     * key-deduplicated, capped at 100 turn-page iterations, proxied, and — in this
     * dev bootstrap — flagged to skip actual download and the result pipeline.
     */
    private static CrawlerRequestRecord searchRequestRecord() {
        // Turn-page filter: stop after 100 pages.
        FilterInfo countFilterInfo = new FilterInfo();
        countFilterInfo.setFilter(CrawlerEnum.CrawlerRecordFilter.count);
        countFilterInfo.setCountTotalNum(100);
        countFilterInfo.setCurCount(0);

        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .startPageRequest(domainId, turnPage)
                .httpUrl(searchUrl)
                .releaseTime(System.currentTimeMillis())
                .filter(CrawlerEnum.CrawlerRecordFilter.key)
                // In-memory key-dedup filter named "filter-taobao-search-queue".
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(
                        StringUtils.joinWith("-", "filter", domainId, "search", "queue")))
                .turnPageFilterInfo(countFilterInfo)
                .needWashed(false)
                .needParsed(false)
                .proxy(proxy)
                .build();
        // Dev mode: don't actually download, and bypass the result pipeline.
        record.setDownload(false);
        record.setSkipPipeline(true);
        return record;
    }

    /**
     * Builds a "support source" record: an internally-downloaded item request whose
     * response feeds the crawl (keywords, user OAuth infos, user agents).
     *
     * @param queueId queue/domain identifier for the support source
     * @param url     HTTP endpoint the support data is fetched from
     */
    private static CrawlerRequestRecord supportSourceRecord(String queueId, String url) {
        return CrawlerRequestRecord.builder()
                .startPageRequest(queueId, turnPageItem)
                .httpUrl(url)
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();
    }

    /**
     * Wires up a {@link DevCrawlerController} with the search start-page request and
     * its three support sources, then starts the crawl. Results go to the console and
     * to a local log file. (A redis result pipeline and a mysql sync-callback record
     * existed here previously and can be re-added via queueResultPipeline/supportRecord.)
     */
    public static void main(String[] args) {

        CrawlerRequestRecord requestRecord = searchRequestRecord();

        // Keyword source record.
        CrawlerRequestRecord keywordCrawlerRecord = supportSourceRecord(
                "taobao_keywords",
                "http://192.168.1.215:9599/v1/meta/taobao/keys");

        // User OAuth infos source record.
        CrawlerRequestRecord userOauthInfoCrawlerRecord = supportSourceRecord(
                "taobao_user_oauth_infos",
                "http://192.168.1.215:9599/crawler/oauth/api/v1/taobao/userOauthInfos");

        // User agents source record.
        CrawlerRequestRecord userAgentsCrawlerRecord = supportSourceRecord(
                "taobao_user_agents",
                "https://fake-useragent.herokuapp.com/browsers/0.1.11");

        DevCrawlerController.builder()
                .triggerInfo(domainId, domainId, System.currentTimeMillis(), domainId)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(domainId))
                .consoleResultPipeline()
                // NOTE(review): "tets.log" looks like a typo for "test.log" — kept as-is
                // so existing tooling that tails this path keeps working.
                .fileResultPipeline("D:\\chance\\log\\tets.log", false)
                .requestRecord(requestRecord)
                .supportRecord(keywordCrawlerRecord)
                .supportRecord(userOauthInfoCrawlerRecord)
                .supportRecord(userAgentsCrawlerRecord)
                .build().start();
    }
}
