package com.chance.cc.crawler.development.bootstrap.xcar.simple;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.controller.DevCrawlerController;
import com.chance.cc.crawler.development.utils.RedisReader;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.StringUtils;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.filter;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.turnPageItem;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Tag_Site_Info;

/**
 * @ClassName XcarSimpleUrlToRedisStart
 * @Description Reads xcar article URLs from a local line-delimited JSON file,
 *              wraps each as a crawl request record, and pushes it into Redis
 *              for the xcar "simple" crawler queue.
 * @Author songding
 * @Date 2021/9/21 15:46
 * @Version 1.0
 **/
public class XcarSimpleUrlToRedisStart {
    public static final String domain = "xcar";
    public static final String site = "SN";
    private static Proxy proxy = new Proxy();
    // Development Redis instance (db index 4) holding the crawl seed queue.
    private static RedisReader redisReader = new RedisReader("192.168.1.215", 6379, 4);

    static {
        // NOTE(review): proxy credentials are hard-coded in source control.
        // They should be moved to external configuration (env vars / config
        // file) and the exposed keys rotated.
        proxy.setHost("http-dyn.abuyun.com");
        proxy.setPort(9020);
        proxy.setUsername("HL89Q19E86E2987D");
        proxy.setPassword("71F33D94CE5F7BF2");
    }

    public static void main(String[] args) {
        articleAndVideo();
        // testArticle();
    }

    /**
     * Reads one JSON object per line from a local seed file, extracts each
     * record's {@code article_url} (with its query string stripped), builds a
     * {@link CrawlerRequestRecord} for it, and pushes the serialized request
     * into a Redis hash/list pair consumed by the xcar "simple" crawler.
     *
     * <p>Blank lines and records without an {@code article_url} are skipped.
     * Any I/O or parse failure aborts the run and is printed to stderr.
     */
    public static void articleAndVideo() {
        String site_biz = "simple";
        String site_info = "simple-20210922-20211110";
        String path = "E:\\chance-crawler-development\\crawler-dev-bootstrap\\src\\main\\java\\com\\chance\\cc\\crawler\\development\\bootstrap\\xcar\\simple\\json.txt";
        // try-with-resources: the reader was previously never closed (resource
        // leak). StandardCharsets.UTF_8 replaces the "UTF-8" string literal so
        // no UnsupportedEncodingException can be thrown.
        try (BufferedReader bufferedReader = new BufferedReader(
                new InputStreamReader(new FileInputStream(path), StandardCharsets.UTF_8))) {
            String readLine;
            while ((readLine = bufferedReader.readLine()) != null) {
                if (StringUtils.isBlank(readLine)) {
                    continue;
                }
                // Each line is a standalone JSON object.
                JSONObject jsonObject = JSONObject.parseObject(readLine);
                String url = jsonObject.getString("article_url");
                if (StringUtils.isBlank(url)) {
                    // Skip records with no article_url instead of failing the
                    // whole run with an NPE at url.split below.
                    continue;
                }
                // Drop the query string so the request URL is canonical.
                String[] split = url.split("\\?");
                System.out.println(split[0]);
                CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                        .startPageRequest(domain, turnPageItem)
                        .domain(domain)
                        .httpUrl(split[0])
                        .releaseTime(System.currentTimeMillis())
                        .httpConfig(HttpConfig.me(domain))
                        .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                        // 24*52: date-range window in hours — TODO confirm unit
                        // against FilterUtils.dateRangeFilterInfo.
                        .addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 52, null))
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                        .proxy(proxy)
                        .build();
                requestRecord.tagsCreator().bizTags().addDomain(domain);
                requestRecord.tagsCreator().bizTags().addSite(site);
                requestRecord.tagsCreator().bizTags().addSiteBiz(site_biz);
                requestRecord.tagsCreator().bizTags().addCustomKV(Tag_Site_Info, site_info);
                requestRecord.tagsCreator().bizTags().addCustomKV("site", site);

                String requestKey = requestRecord.getRecordKey();
                // Attach a serialized filter record the comment crawler reads back.
                CrawlerRecord crawlerRecord = new CrawlerRecord();
                crawlerRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange);
                crawlerRecord.addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 52, null));
                requestRecord.tagsCreator().bizTags().addCustomKV("comment_record_filter_info", JSON.toJSONString(crawlerRecord));

                // Salt the record key with the current time to keep Redis keys unique.
                String shaKey = DigestUtils.sha1Hex(System.currentTimeMillis() + requestKey);
                redisReader.mapPush("crawler-xcar-articleAndVideo-simple-queue_map", shaKey, JSON.toJSONString(requestRecord));
                redisReader.listPush("crawler-xcar-articleAndVideo-simple-queue_list", shaKey);
            }
        } catch (Exception e) {
            // Dev script: surface the failure on the console.
            e.printStackTrace();
        }

    }

    /**
     * Ad-hoc smoke test: crawls a single hard-coded xcar news article through
     * the in-memory dev controller and prints results to the console.
     */
    public static void testArticle() {
        String domain = "xcar";
        String site = "news";
        String url = "https://info.xcar.com.cn/202109/news_2062045_1.html?viewtype=all";
        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(domain)
                .recordKey(url)
                .httpUrl(url)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(domain))
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 62, null))
                .needWashed(true)
                .build();
        requestRecord.tagsCreator().bizTags().addSite(site);
        requestRecord.tagsCreator().bizTags().addCustomKV("site", site);
        // Placeholder extras the downstream scripts fill in during the crawl.
        Map<String, Object> extras = new HashMap<>();
        extras.put("views", null);
        extras.put("likes", null);
        extras.put("comments", null);
        extras.put("urls", url);
        requestRecord.getHttpRequest().setExtras(extras);

        CrawlerRecord filterCrawlerRecord = new CrawlerRecord();
        filterCrawlerRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.key);
        filterCrawlerRecord.addFilterInfo(FilterUtils.memoryFilterKeyInfo(StringUtils.joinWith("-", filter, domain, "comment")));
        filterCrawlerRecord.addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 62, null));
        requestRecord.tagsCreator().bizTags().addCustomKV("comment_record_filter_info", JSON.toJSONString(filterCrawlerRecord));


        DevCrawlerController.builder()
                .crawlerThreadNum(1)
                .triggerInfo(domain, domain + "_forum_article_trigger", System.currentTimeMillis(), domain + "_forum_article_job")
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(domain)) // in-memory queue
                .consoleResultPipeline() // print results to the console
                .requestRecord(requestRecord)
                .build("com.chance.cc.crawler.development.scripts.xcar")
                .start();
    }
}
