package com.chance.cc.crawler.development.bootstrap.kaola;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.controller.DevCrawlerController;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;

import java.io.FileInputStream;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.internalDownload;

/**
 * Development bootstrap for the kaola.com commodity crawler.
 *
 * <p>Wires a seed request for the Kaola home page, a keyword support feed, and a
 * {@link DevCrawlerController} with console and file result pipelines, then starts
 * the crawl. Intended for local development runs only (note the hard-coded Windows
 * log paths and internal meta-service address).
 */
public class KaolaStart {
    private static final String domain = "kaola";
    public static final String site = "commodity";

    // SECURITY NOTE(review): proxy credentials are hard-coded in source, and two
    // spare account IDs sit in the comments below. Move these to configuration or
    // a secrets store before this code is shared or committed further.
    private static final Proxy proxy = new Proxy();

    static {
        // Proxy configuration (abuyun dynamic HTTP proxy).
        // Spare credentials: H5168QRFNIU3804D / 5F6B3610BB719FAA
        proxy.setHost("http-dyn.abuyun.com");
        proxy.setPort(9020);
        proxy.setUsername("HL89Q19E86E2987D");
        proxy.setPassword("71F33D94CE5F7BF2");
    }

    /**
     * Builds the seed and support requests, assembles the dev crawler controller
     * (redis + kafka pipelines to both console and log files, 30 crawler threads),
     * and starts crawling.
     */
    public static void main(String[] args) {
        CrawlerRequestRecord crawlerRequestRecord = news();

        // NOTE(review): this support feed is labelled "amazon_series_keyword" and
        // pulls keys from the amazon meta endpoint — looks copy-pasted from the
        // Amazon bootstrap; confirm it is intentional for the kaola crawler.
        CrawlerRequestRecord keywordRecord = CrawlerRequestRecord.builder()
                .startPageRequest("amazon_series_keyword", turnPageItem)
                .httpUrl("http://192.168.1.217:9599/v1/meta/amazon/keys?site=commodity")
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();

        DevCrawlerController devCrawlerController = DevCrawlerController.builder()
                .triggerInfo(domain, domain + "_trigger", System.currentTimeMillis(), domain + "_job")
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(domain))
                .consoleResultPipeline("redis")
                .consoleResultPipeline("kafka")
                .fileResultPipeline("redis", "D:\\chance_log\\考拉redis-8-3_1.log", false)
                .fileResultPipeline("kafka", "D:\\chance_log\\考拉Kafka-8-3_1.log", false)
                .crawlerThreadNum(30)
                .supportRecord(keywordRecord)
                .requestRecord(crawlerRequestRecord)
                .build("com.chance.cc.crawler.development.scripts.kaola");
        devCrawlerController.start();
    }

    /**
     * Builds the seed request for the kaola.com home page.
     *
     * <p>The record is configured with a 24-hour date-range filter plus an
     * in-memory key filter, labelled to produce article/interaction/comment data,
     * routed through the class-level proxy, and tagged with domain/site biz tags.
     * A separate 10-year comment de-duplication filter is serialized to JSON and
     * attached as the custom biz tag {@code comment_record_filter_info}.
     *
     * @return the fully configured and tagged seed {@code CrawlerRequestRecord}
     */
    public static CrawlerRequestRecord news() {

        String url = "https://www.kaola.com";
        // Mobile search entry point, kept for manual testing:
//        String url = "https://m-search.kaola.com/goods/search.html?key=nike&zp=input&zn=h5Search";

        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(domain)
                .httpUrl(url)
                .recordKey(url)
                .releaseTime(System.currentTimeMillis())
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(domain))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 1, null)) // last 24 hours
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                .proxy(proxy)
                .build();
        crawlerRequestRecord.tagsCreator().bizTags().addDomain(domain);
        crawlerRequestRecord.tagsCreator().bizTags().addSite(site);

        // Comment de-duplication: a dedicated filter record (key-based memory
        // filter + 10-year date range) serialized into a custom biz tag so the
        // comment pipeline can de-dup independently of the page filter above.
        CrawlerRecord filterCrawlerRecord = new CrawlerRecord(); // filter crawler record
        filterCrawlerRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange); // filter by key and time
        filterCrawlerRecord.addFilterInfo(FilterUtils.memoryFilterKeyInfo(StringUtils.joinWith("-", filter, domain, "comment"))); // in-memory filter
        filterCrawlerRecord.addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 3650, null)); // date-range filter (~10 years)
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("comment_record_filter_info", JSON.toJSONString(filterCrawlerRecord)); // custom tag

        return crawlerRequestRecord;
    }

}

