package com.chance.cc.crawler.development.bootstrap.toutiao;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.Downloader;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.downloader.http.HttpClientDownloader;
import com.chance.cc.crawler.core.downloader.http.HttpClientGenerator;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.downloader.proxy.SiteConfigProxyProvider;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.controller.DevCrawlerController;
import com.chance.cc.crawler.development.utils.RedisReader;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.junit.Test;

import java.io.FileInputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;

/**
 * @author lt
 * @version 1.0
 * @date 2021-04-07 11:04:27
 * @email okprog@sina.com
 */
public class TouTiaoArticleSimpleStart {

    public static final String domain = "toutiao";
    public static final String site = "searchkw";
    public static final String kwSite = "search_keyword";
    public static final String siteBiz = "news-release";
    private static final String IS_FILTER_ARTICLE = "is_filter_article";
    // Dev Redis instance used as the crawler request queue (host, port, db index).
    public static final RedisReader redisReader = new RedisReader("192.168.1.215", 6379, 4);

    private static Proxy proxy = new Proxy();
    static {
        // Proxy configuration.
        // NOTE(review): hard-coded proxy credentials committed to source — move them
        // to configuration/environment and rotate; do not keep secrets in the repo.
        proxy.setHost("http-dyn.abuyun.com");
        proxy.setPort(9020);
        proxy.setUsername("HL89Q19E86E2987D");
        proxy.setPassword("71F33D94CE5F7BF2");
    }

    public static void main(String[] args) throws Exception {
        send2Redis("文件地址");// placeholder argument: replace with the real input file path
    }

    /**
     * Reads article URLs (one per line, UTF-8) from the given local file and, for
     * each line, builds a crawl request record and pushes it into Redis — both into
     * the de-duplication map (keyed by a SHA-1 hash) and the work queue list.
     * A failure on one line is logged and does not stop the rest of the batch.
     *
     * @param filePath path of a local file containing one ".../item/&lt;id&gt;" URL per line
     * @throws Exception if the file cannot be opened or read
     */
    public static void send2Redis(String filePath) throws Exception {
        List<String> localUrls;
        // try-with-resources: the original leaked the FileInputStream.
        try (FileInputStream in = new FileInputStream(filePath)) {
            localUrls = IOUtils.readLines(in, StandardCharsets.UTF_8);
        }
        for (String localUrl : localUrls) {
            try {
                // Skip lines without an article id explicitly, instead of relying on
                // an ArrayIndexOutOfBoundsException from split(...)[1] to hit the catch.
                if (!localUrl.contains("item/")) {
                    System.out.println("*----------------*" + localUrl);
                    continue;
                }
                String articleId = localUrl.split("item/")[1];
//                String articleUrl = String.format(iqyArticleUrlFormat, articleId);
                // TODO(review): articleUrl is stubbed to "" — restore the URL format
                // above so articleId is actually used.
                String articleUrl = "";
                CrawlerRequestRecord requestRecord = genCrawlerArticleRecord(articleUrl);

                String requestKey = requestRecord.getRecordKey();
                // Salt with the current timestamp so the hash key is unique per push.
                String shaKey = DigestUtils.sha1Hex(System.currentTimeMillis() + requestKey);
                redisReader.mapPush(StringUtils.joinWith("-", "crawler", domain, site, "simple", "queue_map"), shaKey, JSON.toJSONString(requestRecord));
                redisReader.listPush(StringUtils.joinWith("-", "crawler", domain, site, "simple", "queue_list"), JSON.toJSONString(requestRecord));
            } catch (Exception e) {
                // Log the offending line and keep going; one bad URL must not abort the batch.
                System.out.println("*----------------*" + localUrl);
                e.printStackTrace();
            }
        }
    }

    /**
     * Builds a {@link CrawlerRequestRecord} for a single article URL, pre-configured
     * with page-turn limits, de-duplication filters, business tags, the shared proxy,
     * and a serialized comment de-duplication filter record.
     *
     * @param articleUrl the article page URL to crawl (currently stubbed empty by the caller)
     * @return a fully populated request record ready to be queued
     */
    private static CrawlerRequestRecord genCrawlerArticleRecord(String articleUrl) {
        // Page turning is capped at 1000 pages, counting from 0.
        FilterInfo turnPageFilterInfo = new FilterInfo();
        turnPageFilterInfo.setFilter(CrawlerEnum.CrawlerRecordFilter.count);
        turnPageFilterInfo.setCountTotalNum(1000);
        turnPageFilterInfo.setCurCount(0);

        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPageItem)
                .httpConfig(HttpConfig.me(domain))
                .filter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(domain))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 30, null)) // 30-day window, in hours
                .httpUrl(articleUrl)
                .recordKey(articleUrl)
                .releaseTime(System.currentTimeMillis())
                .notFilterRecord()
                .turnPageFilterInfo(turnPageFilterInfo)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                .needParsed(true)
                .needWashed(true)
                .proxy(proxy)
                .build();
        requestRecord.setDownload(false);
        requestRecord.setSkipPipeline(true);
        requestRecord.tagsCreator().bizTags().addDomain(domain);
        requestRecord.tagsCreator().bizTags().addSite(site);
        requestRecord.tagsCreator().bizTags().addSiteBiz(siteBiz);
        requestRecord.tagsCreator().bizTags().getCategoryTag().addKVTag(IS_FILTER_ARTICLE, true);
        // Keyword -> channel-id mapping carried along as extras on the HTTP request.
        Map<String, String> keysMap = new HashMap<>();
        keysMap.put("finance", "3189399007");
        keysMap.put("sports", "3189398957");
        Map<String, Object> extras = new HashMap<>();
        extras.put("keysMap", keysMap);
        requestRecord.getHttpRequest().setExtras(extras);

        // Comment de-duplication info: a Redis-backed key filter plus a very wide
        // (10-year, in hours) date-range filter, serialized into a custom tag.
        CrawlerRecord filterCrawlerRecord = new CrawlerRecord();
        filterCrawlerRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange);
        filterCrawlerRecord.addFilterInfo(FilterUtils.redisFilterKeyInfo(StringUtils.joinWith("-", filter, domain, site, "comment")));
        filterCrawlerRecord.addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 365 * 10, null));
        requestRecord.tagsCreator().bizTags().addCustomKV("comment_record_filter_info", JSON.toJSONString(filterCrawlerRecord));
        return requestRecord;
    }
}
