package com.chance.cc.crawler.development.bootstrap.taobao.supplement;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.utils.RedisReader;
import com.chance.cc.crawler.meta.core.bean.crawler.CrawlerDomainUrls;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;

import java.io.FileInputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.comment;

/**
 * One-shot import tool: reads Taobao item-detail URLs from a local text file,
 * extracts each item's numeric id, and pushes comment-supplement crawl
 * requests onto a Redis-backed queue (a hash of serialized records plus a
 * list of their SHA-1 keys).
 *
 * @author bx
 * @date 2021/1/27 0027 16:27
 */
public class TBCommentSupplementURLImport {

    /** Crawler domain identifier; used in record keys and queue names. */
    public static final String domain = "taobao";
    /** Endpoint that serves the item comment (feed rate) list. */
    private static String searchUrl = "https://rate.taobao.com/feedRateList.htm";

    /** Proxy attached to every generated request record. */
    public static Proxy proxy = new Proxy();
    static {
        // Proxy configuration (abuyun dynamic HTTP proxy).
        // NOTE(review): credentials are hard-coded in source control; they
        // should be moved to configuration or environment variables.
        //H5168QRFNIU3804D
        //5F6B3610BB719FAA
        proxy.setHost("http-dyn.abuyun.com");
        proxy.setPort(9020);
        proxy.setUsername("HL89Q19E86E2987D");
        proxy.setPassword("71F33D94CE5F7BF2");
    }

    /**
     * Reads tab-separated lines (first field: detail URL) from tb_url.txt,
     * extracts the {@code id} query parameter from each URL, and enqueues one
     * crawl request per item into Redis. Lines without an extractable id are
     * logged as errors and skipped.
     *
     * @param args unused
     * @throws IOException if the input file cannot be read
     */
    public static void main(String[] args) throws IOException {
        long[] dateRange = new long[2];
        dateRange[0] = 1569686400000L; //20190929
        dateRange[1] = System.currentTimeMillis();

        // Template record; per-item fields (record key, schedule time, biz
        // tags) are overwritten inside the loop before each serialization.
        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .httpUrl(searchUrl)
                .releaseTime(System.currentTimeMillis())
                .needWashed(false)
                .needParsed(false)
                .resultLabelTag(comment)
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(-1, dateRange))
                .proxy(proxy)
                .build();
        crawlerRequestRecord.setDownload(false);
        crawlerRequestRecord.setSkipPipeline(true);

        RedisReader redisReader = new RedisReader("192.168.1.215", 6379, 4);
        String requestQueueName = StringUtils.joinWith("-", "crawler", domain, "comment", "supplement", "queue");

        // FIX: the FileInputStream was previously never closed (resource
        // leak); try-with-resources guarantees closure.
        List<String> lines;
        try (FileInputStream in = new FileInputStream("D:\\chance\\采集\\补采\\taobao\\tb_url.txt")) {
            lines = IOUtils.readLines(in, "UTF-8");
        }

        int count = 0;
        for (String line : lines) {
            String url = line.split("\t")[0];
            // FIX: URLs without a query string used to throw
            // ArrayIndexOutOfBoundsException; they now fall through to the
            // existing "error url" branch below.
            String auctionNumId = extractItemId(url);
            if (StringUtils.isNotBlank(auctionNumId)) {
                count++;
                Map<String, String> resultMap = new HashMap<>();
                resultMap.put("detailUrl", line);
                resultMap.put("itemId", auctionNumId);
                resultMap.put("userId", "common");

                CrawlerDomainUrls crawlerDomainUrls = new CrawlerDomainUrls();
                crawlerDomainUrls.setUrl(JSON.toJSONString(resultMap));

                crawlerRequestRecord.setRecordKey(StringUtils.joinWith("-", domain, auctionNumId));
                crawlerRequestRecord.setScheduleTime(crawlerRequestRecord.tagsCreator().scheduleTags().triggerTime());
                crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("domain_result_json", JSON.toJSONString(crawlerDomainUrls));

                // SHA-1 of timestamp + record key serves as a unique queue key.
                String shaKey = DigestUtils.sha1Hex(System.currentTimeMillis() + crawlerRequestRecord.getRecordKey());
                redisReader.mapPush(requestQueueName + "_map", shaKey, JSON.toJSONString(crawlerRequestRecord));
                redisReader.listPush(requestQueueName + "_list", shaKey);
            } else {
                System.out.println("error url : " + line);
            }
        }
        System.out.println("process record count:" + count);
    }

    /**
     * Extracts the value of the {@code id} query parameter from a detail URL.
     * When the parameter occurs more than once, the last occurrence wins
     * (matching the original loop's behavior).
     *
     * @param url item detail URL, possibly without a query string
     * @return the item id, or {@code null} when the URL has no query string
     *         or no {@code id} parameter
     */
    private static String extractItemId(String url) {
        String[] parts = url.split("\\?", 2);
        if (parts.length < 2) {
            return null;
        }
        String id = null;
        for (NameValuePair parameter : URLEncodedUtils.parse(parts[1], StandardCharsets.UTF_8)) {
            if ("id".equals(parameter.getName())) {
                id = parameter.getValue();
            }
        }
        return id;
    }
}
