package com.chance.cc.crawler.development.bootstrap.pcauto.article;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.record.CrawlerResultRecord;
import com.chance.cc.crawler.development.controller.DevCrawlerController;
import com.chance.cc.crawler.development.utils.RedisReader;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @author lt
 * @version 1.0
 * @date 2021-01-11 14:31:43
 * @email okprog@sina.com
 */
public class ArticleStart {

    /** Crawler domain identifier; used to namespace queues, jobs, triggers and filters. */
    public static final String domain = "pcauto";

    /** Logical site/section name within the domain. */
    public static final String site = "article_keywords";

    /** Redis list key for list-URL results: "crawler-pcauto-article_keywords-listUrl-result". */
    public static final String listQueue = StringUtils.joinWith("-", "crawler", domain, site, "listUrl", "result");

    /**
     * Entry point: builds the seed request for the pcauto sitemap page and starts
     * a single-threaded dev crawler that prints results to the console.
     *
     * @param args unused
     * @throws Exception if building or starting the crawler controller fails
     */
    public static void main(String[] args) throws Exception {
        // Manual toggle: uncomment to run the one-off Redis loader instead of the crawler.
//        test();
        CrawlerRequestRecord keywordRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPageItem)
                .domain(domain)
                .recordKey("https://www.pcauto.com.cn/sitemap/sitemap.html")
                .httpUrl("https://www.pcauto.com.cn/sitemap/sitemap.html")
                .releaseTime(System.currentTimeMillis())
                .httpConfig(HttpConfig.me(domain))
                .filter(CrawlerEnum.CrawlerRecordFilter.key)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(domain))
                .needParsed(false)
                .needWashed(true)
                .build();

        keywordRecord.tagsCreator().bizTags().addDomain(domain);
        keywordRecord.tagsCreator().bizTags().addSite(site);

        DevCrawlerController.builder()
                .triggerInfo(domain, domain + "_trigger", System.currentTimeMillis(), domain + "_job")
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(domain)) // in-memory queue
                .consoleResultPipeline() // print results to the console
                .requestRecord(keywordRecord) // seed request
                .crawlerThreadNum(1)
                .build()
                .start();

    }


    /**
     * One-off loader: reads keyword records (one JSON object per line) from a local
     * file, wraps each into a {@code CrawlerResultRecord} JSON payload, and pushes
     * the whole batch onto the Redis list {@link #listQueue}.
     *
     * @throws Exception if the file cannot be read, a line is not valid JSON,
     *                   or the Redis push fails
     */
    public static void test() throws Exception {
        // Read via NIO with an explicit charset. The original opened a FileInputStream
        // that was never closed (resource leak) and decoded with the platform default
        // charset — mojibake risk on Windows for non-ASCII content. Assumes the file
        // is UTF-8 — TODO confirm against the actual export encoding.
        List<String> kwLines = Files.readAllLines(
                Paths.get("C:\\Users\\Administrator\\Documents\\chance\\chance-crawler-development\\crawler-dev-bootstrap\\src\\main\\java\\com\\chance\\cc\\crawler\\development\\bootstrap\\pcauto\\article\\pcAutoArticleKeys.txt"),
                StandardCharsets.UTF_8);
        List<String> records = new ArrayList<>(kwLines.size()); // presize: one record per line
        RedisReader redisReader = new RedisReader("192.168.1.215", 6379, 3);
        int count = 0;
        for (String kwLine : kwLines) {
            count++;
            JSONObject kwObj = JSONObject.parseObject(kwLine);
            String pid = kwObj.getString("pid");
            String mid = kwObj.getString("mid");
            String type1st = kwObj.getString("pingdao"); // "pingdao" = channel (first-level type)
            String type2nd = kwObj.getString("mokuai");  // "mokuai" = module (second-level type)
            String url = kwObj.getString("mokuai_url");  // module URL

            Map<String, String> contentMap = new HashMap<>();
            contentMap.put("type1st", type1st);
            contentMap.put("type2nd", type2nd);
            contentMap.put("url", url);
            contentMap.put("dataId", StringUtils.joinWith("-", domain, site, pid, mid));

            // Double-wrap: contentMap -> CrawlerData.crawlerContent -> CrawlerResultRecord.jsonStr,
            // matching the payload shape the downstream queue consumer expects.
            CrawlerResultRecord crawlerResultRecord = new CrawlerResultRecord();
            CrawlerData crawlerData = new CrawlerData();
            crawlerData.setCrawlerContent(JSON.toJSONString(contentMap));
            crawlerResultRecord.setJsonStr(JSON.toJSONString(crawlerData));

            records.add(JSON.toJSONString(crawlerResultRecord));
        }
        redisReader.listPush(listQueue, records.toArray(new String[0]));
        System.out.println("总条数: " + count); // runtime output ("total count"), kept verbatim
    }
}
