package com.chance.cc.crawler.development.bootstrap.xiaohongshu;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.*;
import com.chance.cc.crawler.core.downloader.http.HttpClientDownloader;
import com.chance.cc.crawler.core.downloader.http.HttpClientGenerator;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.downloader.proxy.SiteConfigProxyProvider;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.queue.crawler.CrawlerQueueConfig;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.development.controller.DevCrawlerController;
import com.chance.cc.crawler.development.scripts.xiaohongshu.applets.XHSAppletsAccountCrawlerScript;
import com.chance.cc.crawler.development.utils.RedisReader;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;

import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;
import static com.chance.cc.crawler.core.script.crawler.CrawlerScript.$_type;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Tag_Field_Keword;
import static com.chance.cc.crawler.development.scripts.xiaohongshu.applets.XHSAppletsArticleCrawlerScript.noteUrlFormat;
import static com.chance.cc.crawler.development.scripts.xiaohongshu.applets.XHSAppletsCommentCrawlerScript.noteCommentUrlFormat;
import static org.codehaus.groovy.runtime.EncodingGroovyMethods.md5;

public class XHSStart {

    // Crawler domain / site identifiers used to tag records and to name queues.
    private static final String domainId = "xhs";
    public static final String site= "search-article";
    public static final String site_biz= "search";
    // Start-record comment URL; the literal "start" segments look like placeholders
    // replaced by the comment script — TODO confirm against the script.
    public static final String commentStartUrl = "https://start/fe_api/burdock/weixin/v2/notes/start/comments/start";
    // %s = note id; resolves to the WeChat-applet single-feed endpoint for one note.
    public static final String wxappArticleUrlFormat = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/note/%s/single_feed";

    // Run counters updated by cleanData()/writeToRedis() and printed at the end of a run.
    public static int listCount = 0;
    public static int writeToRedisCount = 0;
    public static int nonWriteToRedisCount = 0;


    // Shared outbound HTTP proxy used by userNotes().
    // NOTE(review): proxy credentials are hard-coded in source; move them to configuration.
    private static final Proxy proxy = new Proxy();
    static {
        // proxy configuration
        proxy.setHost("http-dyn.abuyun.com");
        proxy.setPort(9020);
        proxy.setUsername("HEW657EL99F83S9D");
        proxy.setPassword("8916B1F3F10B1979");
    }

    // Record keys already pushed to Redis during this run. A HashSet gives O(1)
    // membership checks; the original ArrayList made writeToRedis() O(n) per call.
    private static final Set<String> articleIds = new HashSet<>();

    /**
     * Dev entry point. Each commented-out call below is an alternative one-off job;
     * un-comment exactly the one you want to run. Currently runs {@link #userNotes()}.
     */
    public static void main(String[] args) {

//        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
//                .startPageRequest(domainId, turnPage)
//                .httpUrl("/aweme/v1/general/search/single")
//                .releaseTime(System.currentTimeMillis())
//                .filter(CrawlerEnum.CrawlerRecordFilter.key)
//                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(domainId))
//                .needWashed(false)
//                .needParsed(false)
//                .build();
//        requestRecord.setDownload(false);
//        requestRecord.setSkipPipeline(true);


//        testMatch();
//        copyRedisData();
//        copyRedisDataSet();
//        cleanData();
//        popularity();
//        copyRedisData();
//        timeComment();
//        test();
//        timeComment();
        userNotes();
    }

    /**
     * Dev run: crawls a single hard-coded note ("611bd121000000002103a965") through the
     * xiaohongshu article scripts. Results are echoed per pipeline to the console and
     * accumulated into a map-structured queue.
     */
    private static void timeArticle(){
//        RedisReader redisReader = new RedisReader("192.168.1.214",6379,3);
//        CrawlerRequestRecord crawlerRequestRecord = redisReader.recordPop(
//                "crawler-xhs-xhs-article-queue_list", "crawler-xhs-xhs-article-queue_map");
//        while (crawlerRequestRecord==null){
//            crawlerRequestRecord = redisReader.recordPop(
//                    "crawler-xhs-xhs-article-queue_list", "crawler-xhs-xhs-article-queue_map");
//        }
//

        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domainId, turnPageItem)
                .httpUrl(String.format(noteUrlFormat,"611bd121000000002103a965"))
                .releaseTime(System.currentTimeMillis())
                .needWashed(true)
                .needParsed(true)
                .build();
        requestRecord.tagsCreator().bizTags().addDomain("xhs");
        requestRecord.tagsCreator().bizTags().addSite("xhs-bd-article");
        requestRecord.tagsCreator().bizTags().addCustomKV("keyword","英雄联盟");

        CrawlerQueueConfig crawlerQueueConfig = DevCrawlerController.devRequestQueue(StringUtils.joinWith("-",  domainId,"article","accumulate", "queue"));
        crawlerQueueConfig.setQueueStructure(CrawlerQueueConfig.Structure.map);
        DevCrawlerController.builder()
                .triggerInfo(domainId,"cron",System.currentTimeMillis(),"realtime")
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(StringUtils.joinWith("-","crawler",domainId,"queue"))) // in-memory request queue
                .consoleResultPipeline("article_result") // console output
                .consoleResultPipeline("article_accumulate") // console output
                .consoleResultPipeline("article_time") // console output
                .consoleResultPipeline("article_ids") // console output
//                .fileResultPipeline("article_result","D:\\chance\\log\\xhs.json",false)
                .queueResultPipeline("article_accumulate",crawlerQueueConfig)
                .requestRecord(requestRecord)
//                .requestRecord(crawlerRequestRecord)
                .build("com.chance.cc.crawler.development.scripts.xiaohongshu")
                .start();
    }



    /**
     * Dev run: starts the applets account crawl from its list URL. The start record is
     * flagged download=false, so the account script presumably generates the real
     * requests itself — TODO confirm against XHSAppletsAccountCrawlerScript.
     */
    private static void accountNotes(){

        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domainId, turnPageItem)
                .httpUrl(XHSAppletsAccountCrawlerScript.startListUrl)
                .releaseTime(System.currentTimeMillis())
                .needWashed(false)
                .needParsed(false)
                .build();
        requestRecord.setSkipPipeline(false);
        requestRecord.setDownload(false);
        requestRecord.tagsCreator().bizTags().addDomain("xhs");
        requestRecord.tagsCreator().bizTags().addSite(XHSAppletsAccountCrawlerScript.site);


        CrawlerQueueConfig crawlerQueueConfig = DevCrawlerController.devRequestQueue(StringUtils.joinWith("-",  domainId,"article","accumulate", "queue"));
        crawlerQueueConfig.setQueueStructure(CrawlerQueueConfig.Structure.map);
        DevCrawlerController.builder()
                .triggerInfo(domainId,"cron",System.currentTimeMillis(),"realtime")
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(StringUtils.joinWith("-","crawler",domainId,"queue"))) // in-memory request queue
                .consoleResultPipeline("article_result") // console output
                .consoleResultPipeline("article_accumulate") // console output
                .consoleResultPipeline("article_time") // console output
                .consoleResultPipeline("article_ids") // console output
//                .fileResultPipeline("article_result","D:\\chance\\log\\xhs.json",false)
                .queueResultPipeline("article_accumulate",crawlerQueueConfig)
                .requestRecord(requestRecord)
//                .requestRecord(crawlerRequestRecord)
                .build("com.chance.cc.crawler.development.scripts.xiaohongshu")
                .start();
    }

    /**
     * Dev run: crawls notes for a set of xiaohongshu user ids.
     *
     * The main start record targets the applets account-list URL (download=false,
     * pipeline skipped), goes through the shared proxy, and drops records older than
     * 7 days (24*7 hours) via the dateRange filter. A support record pulls the
     * user-id list from the internal meta service on 192.168.1.217. Results are
     * appended to a local log file.
     */
    private static void userNotes(){
        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domainId, CrawlerEnum.CrawlerRequestType.turnPage)
                .httpUrl(XHSAppletsAccountCrawlerScript.startListUrl)
                .releaseTime(System.currentTimeMillis())
                .needWashed(false)
                .needParsed(false)
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24*7,null))
                .proxy(proxy)
                .build();
        requestRecord.setDownload(false);
        requestRecord.setSkipPipeline(true);
        requestRecord.tagsCreator().bizTags().addDomain(domainId);
        requestRecord.tagsCreator().bizTags().addSite(XHSAppletsAccountCrawlerScript.site);


        // Support record: fetches the user_ids key set from the internal meta service.
        CrawlerRequestRecord userKeyWordCrawlerRecord = CrawlerRequestRecord.builder()
                .startPageRequest("xhs_user_note_kw", turnPageItem)
                .httpUrl("http://192.168.1.217:9599/v1/meta/xhs/keys?site=user_ids")
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();

        CrawlerQueueConfig crawlerQueueConfig = DevCrawlerController.devRequestQueue(StringUtils.joinWith("-",  domainId,"user","notes", "queue"));
        DevCrawlerController.builder()
                .triggerInfo(domainId,"cron",System.currentTimeMillis(),"realtime")
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(StringUtils.joinWith("-","crawler",domainId,"queue"))) // in-memory request queue
                .consoleResultPipeline() // console output
//                .queueResultPipeline("article_ids",crawlerQueueConfig)
                .fileResultPipeline("account_article_ids","E:\\data\\logs\\xhs\\xhs_account.log",false)
                .requestRecord(requestRecord)
                .supportRecord(userKeyWordCrawlerRecord)
                .build("com.chance.cc.crawler.development.scripts.xiaohongshu")
                .start();
    }

    /**
     * Dev run: crawls comments of a single hard-coded note ("6137292a000000002103862e").
     * Results go to the console, a local log file, and a map-structured accumulate queue.
     */
    private static void timeComment(){
//        RedisReader redisReader = new RedisReader("192.168.1.214",6379,3);
//        CrawlerRequestRecord crawlerRequestRecord = redisReader.recordPop(
//                "crawler-xhs-search-comment-queue_list", "crawler-xhs-search-comment-queue_map");

        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domainId, turnPageItem)
                .httpUrl(String.format(noteCommentUrlFormat,"6137292a000000002103862e",""))
                .releaseTime(System.currentTimeMillis())
                .needWashed(true)
                .needParsed(true)
                .build();
        requestRecord.tagsCreator().bizTags().addDomain("xhs");
        requestRecord.tagsCreator().bizTags().addSite("xhs-bd-comment");
        requestRecord.tagsCreator().bizTags().addCustomKV("keyword","英雄联盟");

        CrawlerQueueConfig crawlerQueueConfig = DevCrawlerController.devRequestQueue(StringUtils.joinWith("-",  domainId,"article","accumulate", "queue"));
        crawlerQueueConfig.setQueueStructure(CrawlerQueueConfig.Structure.map);
        DevCrawlerController.builder()
                .triggerInfo(domainId,"cron",System.currentTimeMillis(),"realtime")
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(StringUtils.joinWith("-","crawler",domainId,"queue"))) // in-memory request queue
                .consoleResultPipeline() // console output
                .fileResultPipeline("article_comment", "D:\\chance\\log\\xhs\\xhs_comment.log", false)
                .queueResultPipeline("article_accumulate",crawlerQueueConfig)
                .requestRecord(requestRecord)
                .build("com.chance.cc.crawler.development.scripts.xiaohongshu")
                .start();
    }

    /**
     * Pops one pending request from the Redis-backed crawler queue on 192.168.1.215
     * (DB 2) and replays it as a "search-popularity" crawl, with user OAuth infos
     * supplied by a support record from the internal oauth service.
     *
     * Fix over the original: {@code recordPop} can return null when the queue is empty
     * (see the retry loop commented out in timeArticle); the original then NPE'd on
     * {@code tagsCreator()}. We now bail out gracefully instead.
     */
    private static void popularity(){
        RedisReader redisReader = new RedisReader("192.168.1.215",6379,2);
        CrawlerRequestRecord crawlerRequestRecord = redisReader.recordPop("crawler-xhs-queue_list", "crawler-xhs-queue_map");
        if (crawlerRequestRecord == null) {
            System.out.println("crawler-xhs-queue is empty, nothing to replay");
            return;
        }

        // Support record: fetches user OAuth infos from the internal oauth service.
        CrawlerRequestRecord userOauthInfoCrawlerRecord = CrawlerRequestRecord.builder()
                .startPageRequest("xhs_user_oauth_infos", turnPageItem)
                .httpUrl("http://192.168.1.215:9599/crawler/oauth/api/v1/xhs/userOauthInfos")
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();

        crawlerRequestRecord.tagsCreator().bizTags().addDomain(domainId);
        crawlerRequestRecord.tagsCreator().bizTags().addSite("search-popularity");

        DevCrawlerController.builder()
                .triggerInfo(domainId,"cron",System.currentTimeMillis(),"realtime")
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(StringUtils.joinWith("-","crawler",domainId,"queue"))) // in-memory request queue
                .consoleResultPipeline() // console output
                .fileResultPipeline("D:\\chance\\log\\dy.json",false)
                .requestRecord(crawlerRequestRecord)
                .supportRecord(userOauthInfoCrawlerRecord)
                .build()
                .start();
    }

    /**
     * Quick check that the app search-URL regex accepts a real search request URL;
     * prints the boolean match result.
     */
    private static void testMatch(){
        final String searchUrlRegex = "https://\\S*/api/sns/v10/search/notes[\\s\\S]*";
        final String sampleRequestUrl = "https://www.xiaohongshu.com/api/sns/v10/search/notes?keyword=穿搭";
        // String.matches anchors to the whole input, same as Pattern/matcher().matches().
        System.out.println(sampleRequestUrl.matches(searchUrlRegex));
    }

    /**
     * Copies three pdd hash keys from Redis DB 1 into date-suffixed copies in DB 2
     * on the same instance (192.168.1.227).
     *
     * Fixes over the original: the two Jedis connections and the pool are now released
     * via try-with-resources (they were leaked), and the triplicated copy loop is
     * factored into {@link #copyHashWithDateSuffix}.
     */
    private static void copyRedisData(){
        JedisPoolConfig config = new JedisPoolConfig(); // connection-pool configuration
        config.setMaxTotal(100); // max total connections
        config.setMaxIdle(10);   // max idle connections

        // NOTE(review): host and password are hard-coded dev credentials; do not ship.
        try (JedisPool jedisPool = new JedisPool(config, "192.168.1.227", 6379);
             Jedis source = jedisPool.getResource();
             Jedis target = jedisPool.getResource()) {
            source.auth("chance123");
            source.select(1); // read side: DB 1
            target.auth("chance123");
            target.select(2); // write side: DB 2

            String currentTime = DateFormatUtils.format(System.currentTimeMillis(), "yyyyMMdd");
            copyHashWithDateSuffix(source, target, "redis_map_pdd_product_sub_list", currentTime);
            copyHashWithDateSuffix(source, target, "redis_map_pdd_product_detail_info", currentTime);
            copyHashWithDateSuffix(source, target, "redis_map_pdd_mall_collect_list", currentTime);
        }
    }

    /**
     * Copies every field of {@code hashKey} on {@code source} into
     * {@code hashKey + "_" + dateSuffix} on {@code target}.
     */
    private static void copyHashWithDateSuffix(Jedis source, Jedis target, String hashKey, String dateSuffix) {
        for (String field : source.hkeys(hashKey)) {
            target.hset(hashKey + "_" + dateSuffix, field, source.hget(hashKey, field));
        }
    }

    /**
     * Copies the members of the old comment filter set into the renamed filter set
     * on Redis DB 3 (192.168.1.214).
     *
     * Fixes over the original: the unused {@code RedisReader} local (which only opened
     * a spare connection) is removed, and the Jedis connection is closed via
     * try-with-resources instead of being leaked.
     */
    private static void copyRedisDataSet(){
        try (Jedis jedis = new Jedis("192.168.1.214", 6379)) {
            jedis.select(3);
            for (String member : jedis.smembers("filter-xhs-search-comment-queue")) {
                jedis.sadd("filter-xhs-xhs-comment-queue", member);
            }
        }
    }

    /**
     * Copies the pdd product sub-list hash from Redis DB 1 into a date-suffixed copy
     * in DB 2 on the same instance (192.168.1.227).
     *
     * Fix over the original: the pool and both connections are closed via
     * try-with-resources instead of being leaked.
     *
     * @throws IOException kept for caller compatibility with the original signature
     */
    public static void washMalls() throws IOException {
        JedisPoolConfig config = new JedisPoolConfig(); // connection-pool configuration
        config.setMaxTotal(100); // max total connections
        config.setMaxIdle(10);   // max idle connections

        // NOTE(review): hard-coded dev credentials; move to configuration.
        try (JedisPool jedisPool = new JedisPool(config, "192.168.1.227", 6379);
             Jedis source = jedisPool.getResource();
             Jedis target = jedisPool.getResource()) {
            source.auth("chance123");
            source.select(1); // read side: DB 1
            target.auth("chance123");
            target.select(2); // write side: DB 2

            String currentTime = DateFormatUtils.format(System.currentTimeMillis(), "yyyyMMdd");
            for (String key : source.hkeys("redis_map_pdd_product_sub_list")) {
                String value = source.hget("redis_map_pdd_product_sub_list", key);
                target.hset("redis_map_pdd_product_sub_list_" + currentTime, key, value);
            }
        }
    }

    /**
     * Dev run: POSTs a serialized start record to the local queue-init HTTP endpoint
     * through HttpClientDownloader and prints the raw response text. The record
     * carries a keyOrDateRange filter (Redis key filter + 7-day date-range filter).
     */
    private  static void test(){

        CrawlerEnum.CrawlerRecordFilter filter = CrawlerEnum.CrawlerRecordFilter.keyOrDateRange;
        List<FilterInfo> filterInfos = Arrays.asList(FilterUtils.redisFilterKeyInfo(StringUtils.joinWith("-","filter",domainId,site,"queue"))
                ,FilterUtils.dateRangeFilterInfo(24*7,null));

        String requestQueueName = StringUtils.joinWith("-","crawler",domainId,site,"queue");
        String articleAccumQueueName = StringUtils.joinWith("-",  domainId,"article","accumulate", "queue");
        String url = "http://localhost:9599/crawler/domain/xhs/api/v1/results/queue/init?requestQueueName=%s&articleAccumQueueName=%s&hourFromNow=%d&isFilterComments=%s";

        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domainId,turnPage)
                .httpUrl(commentStartUrl)
                .releaseTime(System.currentTimeMillis())
                .build();
        requestRecord.setFilter(filter);
        requestRecord.setFilterInfos(filterInfos);


        requestRecord.tagsCreator().bizTags().addDomain(domainId);
        requestRecord.tagsCreator().bizTags().addSite(site);

        HttpConfig httpConfig = HttpConfig.me("test");
//        httpConfig.setProxy(proxy);
//        httpConfig.setUseProxy(true);

        HttpClientDownloader downloader = new HttpClientDownloader();
        downloader.setClientGenerator(new HttpClientGenerator());
        downloader.setProxyProvider(new SiteConfigProxyProvider());


        // The whole request record travels as the JSON body; schedule tags go in a header.
        HttpRequestBody jsonBody = HttpRequestBody.json(JSON.toJSONString(requestRecord), "utf-8");
        HttpRequest httpRequest = new HttpRequest();
        httpRequest.setUrl(String.format(url,requestQueueName,articleAccumQueueName,24*7,"false"));
        httpRequest.setMethod(HttpConstant.Method.POST);
        httpRequest.setRequestBody(jsonBody);
        httpRequest.addHeader("crawler_schedule_tag",JSON.toJSONString(requestRecord.tagsCreator().scheduleTags()));

        HttpPage download = downloader.download(httpRequest, httpConfig);

        System.out.println(download.getRawText());


    }

    /**
     * Drains the backup queue on Redis DB 3 (192.168.1.215) and re-dispatches each
     * record by URL type: time-descending search pages are re-parsed into article
     * requests, single-feed article URLs are written back to the test queue, and
     * anything else is dumped to the console. Prints counter totals at the end.
     *
     * Fixes over the original: the Jedis connection is closed via try-with-resources,
     * and the two regexes are compiled once instead of once per record.
     */
    private static void cleanData(){
        Pattern searchForTimeDescendingPattern = Pattern.compile(
                "https://\\S*/api/sns/v10/search/notes\\?\\S*sort=time_descending\\S*");
        Pattern wxappArticlePattern = Pattern.compile(
                "https://\\S*/fe_api/burdock/weixin/v2/note/\\S*/single_feed");

        try (Jedis jedis = new Jedis("192.168.1.215", 6379)) {
            jedis.select(3);
            List<String> allKeys = jedis.lrange("crawler-xhs-bak-queue_list", 0, -1);
            for (String key : allKeys) {
                String record = jedis.hget("crawler-xhs-bak-queue_map", key);
                CrawlerRequestRecord crawlerRecord = JSON.parseObject(record, CrawlerRequestRecord.class);
                String requestUrl = crawlerRecord.getHttpRequest().getUrl();

                if (searchForTimeDescendingPattern.matcher(requestUrl).matches()) {
                    listCount++;
                    parseSearchLinks(crawlerRecord, jedis);
                } else if (wxappArticlePattern.matcher(requestUrl).matches()) {
                    writeToRedis(crawlerRecord, jedis);
                } else {
                    System.out.println(record); // unrecognized record: dump for inspection
                }
            }
        }

        System.out.println(listCount + "-->" + writeToRedisCount + "-->" + nonWriteToRedisCount);
    }

//    private static void staticsCount(){
//        Jedis jedis=new Jedis("192.168.1.214", 6379);
//        jedis.select(3);
//
//        Set<String> dateRange = new HashSet<>();
//        dateRange.add("2021-03-23");
//
//        for (String date : dateRange) {
//            ScanOptions scanOptions = ScanOptions.scanOptions().count(500).match(date+"*").build();
//
//            jedis.hsc
//            Cursor<Map.Entry<Object, Object>> cursor = redisTemplate.opsForHash().scan(articleAccumQueueName, scanOptions);
//            while (cursor.hasNext()){
//                Map.Entry<Object,Object> cursorEntry = cursor.next();
//                System.out.println(cursorEntry.getKey());
//
//            }
//        }
//
//    }

    /**
     * Extracts note items from a downloaded search-result page, builds a signed article
     * request per note (x-sign = "X" + md5 of the /fe_api path salted with "WSUDD"),
     * tags it with the original search keyword, and writes it to the test queue.
     *
     * Fixes over the original: a URL with no query string previously threw
     * ArrayIndexOutOfBoundsException on {@code split("\\?")[1]} — now guarded; the
     * charset lookup uses {@link StandardCharsets#UTF_8} instead of Charset.forName.
     *
     * @param crawlerRecord search request whose internal download page holds the result JSON
     * @param jedis         open Redis connection, passed through to writeToRedis
     */
    private static void parseSearchLinks(CrawlerRequestRecord crawlerRecord,Jedis jedis){
        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        int queryStart = requestUrl.indexOf('?');
        if (queryStart < 0) {
            return; // no query string, hence no keyword to extract
        }
        List<NameValuePair> parameters = URLEncodedUtils.parse(requestUrl.substring(queryStart + 1), StandardCharsets.UTF_8);
        String keyword = "";
        for (NameValuePair parameter : parameters) {
            if ("keyword".equals(parameter.getName())) {
                keyword = parameter.getValue();
                break;
            }
        }
        String rawText = crawlerRecord.getInternalDownloadPage().getRawText();
        Json rawTextJson = new Json(rawText);
        List<String> items;
        try {
            items = rawTextJson.jsonPath($_type + ".data.items").all();

            if (items != null && !items.isEmpty()){
                for (String item : items) {
                    Json itemJson = new Json(item);
                    String modelType = itemJson.jsonPath($_type + ".model_type").get();
                    if ("note".equals(modelType)) {
                        String noteId = itemJson.jsonPath($_type + ".note.id").get();

                        String noteUrl = String.format(wxappArticleUrlFormat, noteId);
                        // Sign the request path (from "/fe_api/" onward) with the WSUDD salt.
                        String urlPath = noteUrl.substring(noteUrl.indexOf("/fe_api/"));
                        String x_sign = "X" + md5(urlPath + "WSUDD");

                        CrawlerRequestRecord crawlerArticleRequestRecord = CrawlerRequestRecord.builder()
                                .itemPageRequest(crawlerRecord)
                                .httpUrl(noteUrl)
                                .httpHeads(crawlerRecord.getHttpRequest().getHeaders())
                                .httpHead("x-sign",x_sign)
                                .releaseTime(System.currentTimeMillis())
                                .needWashed(true)
                                .notFilterRecord()
                                .build();
                        crawlerArticleRequestRecord.tagsCreator().bizTags().addDomain("xhs");
                        crawlerArticleRequestRecord.tagsCreator().bizTags().addSite(site);
                        crawlerArticleRequestRecord.tagsCreator().bizTags().addSiteBiz(site_biz);
                        crawlerArticleRequestRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Keword,keyword);

                        writeToRedis(crawlerArticleRequestRecord,jedis);
                    }
                }
            }
        }catch (Exception e){
            // Best-effort in a dev tool: report and move on to the next record.
            e.printStackTrace();
        }
    }

    /**
     * Returns true when the entire {@code requestUrl} matches the regex {@code urlRegx}.
     */
    public static boolean isUrlMatch(String requestUrl, String urlRegx){
        // String.matches compiles the pattern and anchors the match to the whole input,
        // identical to Pattern.compile(...).matcher(...).matches().
        return requestUrl.matches(urlRegx);
    }

    /**
     * Writes one crawler record into the test queue (hash entry plus list key),
     * skipping record keys already written during this run; updates the run counters.
     *
     * @param crawlerArticleRequestRecord record to enqueue
     * @param jedis                       open Redis connection (caller owns its lifecycle)
     */
    private static void writeToRedis(CrawlerRequestRecord crawlerArticleRequestRecord,Jedis jedis){
        String recordKey = crawlerArticleRequestRecord.getRecordKey();

        // In-process dedupe on recordKey for the lifetime of this run.
        if (!articleIds.contains(recordKey)){
            // Queue key is SHA-1 of timestamp + recordKey, so replays get distinct entries.
            String key = DigestUtils.sha1Hex(System.currentTimeMillis()+recordKey);
            jedis.hset("crawler-xhs-test-queue_map", key,JSON.toJSONString(crawlerArticleRequestRecord));
            jedis.lpush("crawler-xhs-test-queue_list",key);
            writeToRedisCount++;
            articleIds.add(recordKey);
        } else {
            nonWriteToRedisCount++;
        }
    }

}
