package com.chance.cc.crawler.development.scripts.weibo.hotList;

import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequestBody;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.development.scripts.allfeild.AICCommonField;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;

/**
 * Crawler script for the Weibo hot-list ranking pages: seeds a POST against the
 * rank AJAX endpoint from each landing page, pages through the ranking, and
 * washes each ranked account into a {@code CrawlerData} record.
 *
 * @Author songding
 * @Date 2021/10/8 15:21
 * @Version 1.0
 **/
public class WeiboHotListCrawlerScript extends CrawlerCommonScript {
    private static final Logger log = LoggerFactory.getLogger(WeiboHotListCrawlerScript.class);
    private static final String domain = "weibo";
    private static final String site = "hotList";

    /** Regex matching the hot-list landing pages (the seed URLs). */
    private static final String weiboUrl = "https://v6.bang.weibo.com/newczv/\\S*";
    /** AJAX endpoint that serves one page of ranking data (called via POST). */
    private static final String weiboHotListUrl = "https://v6.bang.weibo.com/aj/newczv/rank";

    /** Ranking field id sent with every rank request; fixed for this site. */
    private static final String FIELD_ID = "1001";
    /** Crawl at most this many rank pages per seed. */
    private static final int MAX_PAGE = 5;
    /** Entries per rank page; the show_rank offset advances by this much each page. */
    private static final int PAGE_SIZE = 20;

    @Override
    public void initUrlRegulars() {
        addUrlRegular(weiboUrl);
        addUrlRegular(weiboHotListUrl);
    }

    /**
     * Turns a fetched page into follow-up requests.
     * <p>
     * A landing page produces the first rank-endpoint POST (page=1, show_rank=0).
     * A rank-endpoint response produces the next page's POST, driven by the
     * "page"/"show_rank" counters the previous request stored in its biz tags,
     * stopping once {@code MAX_PAGE} pages have been requested.
     *
     * @param crawlerRecord the record whose response is being processed
     * @param page          the downloaded page (unused here; links are tag-driven)
     * @return follow-up requests; empty when pagination is exhausted
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> parseLinks = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        // "time"/"type" are expected on the seed's biz tags — TODO confirm the
        // task creator always sets them; a missing tag yields "null" in params.
        String dt = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("time");
        String periodType = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("type");

        if (url.matches(weiboUrl)) {
            // Landing page: kick off pagination at page 1, offset 0.
            parseLinks.add(buildRankRequest(crawlerRecord, url, dt, periodType, 1, 0));
        }
        // Exact comparison — the endpoint URL is a literal, not a pattern
        // (String.matches would treat its '.' characters as wildcards).
        if (weiboHotListUrl.equals(url)) {
            KVTag pageTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("page");
            KVTag showRankTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("show_rank");
            if (pageTag == null || showRankTag == null) {
                // Counters are set by buildRankRequest; absence means this record
                // did not originate from this script's pagination — stop here
                // instead of throwing an NPE.
                log.warn("missing page/show_rank biz tags on rank response, url={}", url);
                return parseLinks;
            }
            int nextPage = (int) pageTag.getVal();
            if (nextPage > MAX_PAGE) {
                return parseLinks;
            }
            int showRank = (int) showRankTag.getVal();
            parseLinks.add(buildRankRequest(crawlerRecord, url, dt, periodType, nextPage, showRank));
        }
        return parseLinks;
    }

    /**
     * Builds a form-urlencoded POST against the rank endpoint for the given
     * page/offset and stores the counters for the NEXT page in the new record's
     * biz tags (page+1, showRank+PAGE_SIZE).
     *
     * @param parent     the record this request derives from
     * @param referer    value for the Referer header
     * @param dt         the "dt" request parameter (from the seed's "time" tag)
     * @param periodType the "period_type" parameter (from the seed's "type" tag)
     * @param pageNo     1-based page number to request
     * @param showRank   rank offset of the first entry on the requested page
     */
    private CrawlerRequestRecord buildRankRequest(CrawlerRequestRecord parent, String referer,
                                                  String dt, String periodType,
                                                  int pageNo, int showRank) {
        String params = "field_id=" + FIELD_ID + "&dt=" + dt + "&page=" + pageNo
                + "&show_rank=" + showRank + "&period_type=" + periodType;
        // HttpRequestBody.xml is used only as a raw-string body carrier; the
        // Content-Type header below tells the server the body is form-urlencoded.
        HttpRequestBody body = HttpRequestBody.xml(params, "utf-8");
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(parent)
                .httpUrl(weiboHotListUrl)
                .httpHead("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")
                .httpHead("Referer", referer)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .build();
        record.getHttpRequest().setMethod("post");
        record.getHttpRequest().setRequestBody(body);
        record.tagsCreator().bizTags().addCustomKV("page", pageNo + 1);
        record.tagsCreator().bizTags().addCustomKV("show_rank", showRank + PAGE_SIZE);
        return record;
    }

    /**
     * Washes a downloaded page: only rank-endpoint responses carry data.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> dataList = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (weiboHotListUrl.equals(url)) {
            this.washArticle(crawlerRecord, page, dataList);
        }
        return dataList;
    }

    /**
     * Extracts one {@code CrawlerData} per ranked account from a rank-endpoint
     * JSON response and appends them to {@code dataList}.
     */
    private void washArticle(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> dataList) {
        String msg = page.getJson().jsonPath($_type + ".msg").get();
        // Null-safe order: a response without "msg" must not NPE the pipeline.
        if (!"success".equals(msg)) {
            log.error("The visit to fail");
            return;
        }
        List<String> uids = page.getJson().jsonPath($_type + ".data.rankData..uid").all();
        List<String> scores = page.getJson().jsonPath($_type + ".data.rankData..score").all();
        List<String> ranks = page.getJson().jsonPath($_type + ".data.rankData..rank").all();
        List<String> screenNames = page.getJson().jsonPath($_type + ".data.rankData..screen_name").all();
        List<String> verifiedReasons = page.getJson().jsonPath($_type + ".data.rankData..verified_reason").all();
        // Only emit rows for indices present in every field list, so a ragged
        // response (a rank entry missing one field) cannot throw
        // IndexOutOfBoundsException and lose the whole page.
        int rows = Math.min(Math.min(uids.size(), scores.size()),
                Math.min(ranks.size(), Math.min(screenNames.size(), verifiedReasons.size())));
        for (int i = 0; i < rows; i++) {
            String uid = uids.get(i);
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, uid))
                    .url(crawlerRecord.getHttpRequest().getUrl())
                    .releaseTime(System.currentTimeMillis())
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                    .addContentKV(AICCommonField.Field_Author, screenNames.get(i))
                    .addContentKV(AICCommonField.Field_Author_Id, uid)
                    .addContentKV(AICCommonField.Field_Floor, ranks.get(i))
                    .addContentKV(AICCommonField.Field_Score, scores.get(i))
                    .addContentKV("type", verifiedReasons.get(i))
                    .build();
            dataList.add(crawlerData);
        }
    }

    /**
     * Accepts only records tagged with this script's site.
     * Constant-first equals so a record with a null site tag cannot NPE.
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        return site.equals(crawlerRecord.tagsCreator().bizTags().site());
    }

    /** No post-execution work is needed for this script. */
    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    /**
     * Re-enqueues the same URL for another download attempt, tracking the retry
     * count in the "count" biz tag and giving up after 10 attempts.
     * NOTE(review): currently unreferenced within this class — presumably kept
     * for retry handling; confirm before removing.
     */
    private void recordAgainDownload(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        Integer integerCount = 0;
        String count = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("count");
        if (count == null) {
            // First retry for this URL.
            integerCount = 1;
        } else {
            integerCount = Integer.valueOf(count);
            if (integerCount >= 10) {
                log.error("The number of downloads exceeds the limit");
                return;
            }
            integerCount += 1;
        }

        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(crawlerRecord.getHttpRequest().getUrl())
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        // Distinct record key per attempt so dedup does not drop the retry.
        record.setRecordKey(crawlerRecord.getHttpRequest().getUrl() + integerCount);
        record.tagsCreator().bizTags().addCustomKV("count", integerCount);
        parseLinks.add(record);
    }

    @Override
    public String domain() {
        return domain;
    }
}
