package com.chance.cc.crawler.development.bootstrap.xiaohongshu;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.utils.RedisReader;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;

import java.io.FileInputStream;
import java.nio.charset.StandardCharsets;
import java.util.List;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRecordFilter.keyOrDateRange;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.filter;

/**
 * @author lt
 * @version 1.0
 * @date 2021-06-17 14:39:48
 * @email okprog@sina.com
 */
/**
 * Dev bootstrap tool: reads Xiaohongshu ("xhs") item URLs from a local text file,
 * converts each one into a wxapp single-feed article crawl-request record, and
 * pushes the records into Redis (a hash holding the JSON payload plus a list of
 * keys) for downstream crawler workers to consume.
 */
public class XhsSendItemRecord2Redis {

    public static final String domain = "xhs";
    public static final String site = "user_notes";
    public static final String siteBiz = "notes";
    // Redis connection used as the hand-off queue (host, port, db index).
    public static final RedisReader redisReader = new RedisReader("192.168.1.214", 6379, 3);
    // wxapp single-feed endpoint; %s is replaced with the note/article id.
    public static final String wxappArticleUrlFormat = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/note/%s/single_feed";

    private static Proxy proxy = new Proxy();
    static {
        // Proxy configuration (abuyun dynamic HTTP proxy).
        // SECURITY NOTE(review): proxy credentials are hard-coded in source; move
        // them to configuration/environment variables and rotate the exposed values.
        proxy.setHost("http-dyn.abuyun.com");
        proxy.setPort(9020);
        proxy.setUsername("HL89Q19E86E2987D");
        proxy.setPassword("71F33D94CE5F7BF2");
    }
    // NOTE(review): not used by the static send path below; kept for compatibility.
    HttpConfig httpConfig = HttpConfig.me(domain);

    public static void main(String[] args) throws Exception {
        send2Redis("crawler-dev-bootstrap/src/main/java/com/chance/cc/crawler/development/bootstrap/xiaohongshu/xhs-urls.txt");
    }

    /**
     * Reads one item URL per line from {@code filePath} (UTF-8) and pushes a crawl
     * request record for each into Redis. Lines that cannot be processed are logged
     * and skipped; processing continues with the remaining URLs.
     *
     * @param filePath path to a UTF-8 text file whose lines embed "item/&lt;articleId&gt;"
     * @throws Exception if the file cannot be opened or read
     */
    public static void send2Redis(String filePath) throws Exception {
        final List<String> localUrls;
        // try-with-resources: the original leaked the FileInputStream.
        try (FileInputStream in = new FileInputStream(filePath)) {
            localUrls = IOUtils.readLines(in, StandardCharsets.UTF_8);
        }
        for (String localUrl : localUrls) {
            try {
                // Extract the article id that follows "item/" in the source URL;
                // skip lines that do not contain one instead of relying on an
                // ArrayIndexOutOfBoundsException reaching the catch block.
                String[] parts = localUrl.split("item/");
                if (parts.length < 2 || parts[1].isEmpty()) {
                    System.out.println("skip url without item id: " + localUrl);
                    continue;
                }
                String articleId = parts[1];
                String articleUrl = String.format(wxappArticleUrlFormat, articleId);

                CrawlerRequestRecord requestRecord = genCrawlerArticleRecord(articleUrl);

                String requestKey = requestRecord.getRecordKey();
                // Salt with the current timestamp so the Redis key is unique per push.
                String shaKey = DigestUtils.sha1Hex(System.currentTimeMillis() + requestKey);

                // Payload goes into a hash keyed by shaKey; the list carries the keys to consume.
                redisReader.mapPush(StringUtils.joinWith("-", "crawler", domain, site, "article_test", "queue_map"), shaKey, JSON.toJSONString(requestRecord));
                redisReader.listPush(StringUtils.joinWith("-", "crawler", domain, site, "article_test", "queue_list"), shaKey);
                System.out.println("add key : " + shaKey + " success!");
            } catch (Exception e) {
                // Best-effort: log the offending URL and keep going with the rest.
                System.out.println("*----------------*" + localUrl);
                e.printStackTrace();
            }
        }
    }

    /**
     * Builds a crawl request record for one article URL, tagging it with
     * domain/site/biz info and attaching comment de-duplication filter metadata.
     *
     * @param articleUrl fully formatted wxapp single-feed URL
     * @return the populated, proxied request record (never {@code null})
     */
    private static CrawlerRequestRecord genCrawlerArticleRecord(String articleUrl) {
        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPageItem)
                .httpUrl(articleUrl)
                .recordKey(articleUrl)
                .releaseTime(System.currentTimeMillis())
                .notFilterRecord()
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                .needParsed(true)
                .needWashed(true)
                .proxy(proxy)
                .build();
        requestRecord.tagsCreator().bizTags().addDomain(domain);
        requestRecord.tagsCreator().bizTags().addSite(site);
        requestRecord.tagsCreator().bizTags().addSiteBiz(siteBiz);

        // Attach comment de-duplication (filter) info: a Redis-backed filter key
        // plus a ~10-year date-range window.
        CrawlerRecord filterCrawlerRecord = new CrawlerRecord();
        filterCrawlerRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange);
        filterCrawlerRecord.addFilterInfo(FilterUtils.redisFilterKeyInfo(StringUtils.joinWith("-", filter, domain, site, "comment")));
        filterCrawlerRecord.addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 365 * 10, null));
        requestRecord.tagsCreator().bizTags().addCustomKV("comment_record_filter_info", JSON.toJSONString(filterCrawlerRecord));
        requestRecord.tagsCreator().scheduleTags().getCategoryTag().addLabelTag(CrawlerEnum.CrawlerDataType.comment.enumVal());
        return requestRecord;
    }
}
