package com.chance.cc.crawler.development.scripts.weibo.invitation;

import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.development.scripts.allfeild.AICCommonField;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Self-developed crawler script for Weibo topic-search result pages
 * (the "invitation"/post site): builds keyword search requests, follows
 * result-page pagination, and extracts per-post article and interaction data.
 *
 * @author songding
 * @version 1.0
 * @since 2021/10/8
 **/
public class WeiboInvitationCrawlerScript extends CrawlerCommonScript {

    private static final Logger log = LoggerFactory.getLogger(WeiboInvitationCrawlerScript.class);

    /** Crawler domain identifier reported by {@link #domain()}. */
    private static final String domain = "weibo";
    /** Site tag this script accepts; see {@link #crawlerCheck(CrawlerRequestRecord)}. */
    private static final String site = "invitation";
    /**
     * Double-URL-encoded hashtag originally hard-coded as the search topic.
     * Retained for reference; search URLs are now built from the per-keyword
     * encoded topic instead (see {@link #prepareRequest}).
     */
    private static final String params = "%2523%25E4%25BA%25A4%25E8%25AD%25A6%25E5%259C%25A8%25E6%259C%258B%25E5%258F%258B%25E5%259C%2588%25E6%2599%2592%25E6%2594%25B6%25E5%2588%25B0%25E7%259A%2584%25E9%259B%25A8%25E4%25BC%259E%2523";

    private static final Map<String,String> cookieMap = new HashMap<>();
    /** "SUB" cookie value required by s.weibo.com search pages. */
    private static final String cookie = "_2A25MVrUDDeRhGeNI7lYR-S_Fwz-IHXVvuNtLrDV8PUJbkNANLVmkkW1NSExA2JFf5T7pSaFeSBmHX3qvqAFPvWoG";
    /**
     * Search URL template. Contains FOUR {@code %s} placeholders in order:
     * path topic, {@code q} query parameter, timescope start, timescope end —
     * so {@code String.format} must be given four arguments.
     */
    private static final String UrlWeibo = "https://s.weibo.com/weibo/%s/?q=%s&typeall=1&suball=1&timescope=custom:%s:%s&Refer=g";

    private static final String weiboUrl = "https://s.weibo.com";
    private static final String weiboUrlRegulars = "https://s.weibo.com/weibo/\\S*";
    private static final String weiboUrlPageRegulars = "https://s.weibo.com/weibo/\\S*&page=\\S*";

    static {
        cookieMap.put("SUB", cookie);
    }

    @Override
    public void initUrlRegulars() {
        addUrlRegular(weiboUrl);
        addUrlRegular(weiboUrlRegulars);
        addUrlRegular(weiboUrlPageRegulars);
    }

    /**
     * Builds one search-page request per keyword carried by the "keys"
     * support-source response. The time range comes from the start/end tags of
     * the triggering record.
     *
     * @param crawlerRequestRecord the triggering request (source of biz tags and time range)
     * @param supportSourceRecords pre-downloaded support responses; only the one whose
     *                             URL contains "keys" is consumed; may be null
     * @return the search requests to enqueue (empty when nothing applies)
     */
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> prepareLinks = new ArrayList<>();
        String startTime = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("startTime");
        String endTime = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("endTime");
        if (supportSourceRecords == null) {
            return prepareLinks;
        }
        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
            String url = supportSourceRecord.getHttpRequest().getUrl();
            if (!url.contains("keys")) {
                continue;
            }
            HttpPage page = supportSourceRecord.getInternalDownloadPage();
            String msg = page.getJson().jsonPath($_type + ".msg").get();
            // Yoda comparison: also guards against msg being null.
            if (!"success".equals(msg)) {
                continue;
            }
            List<String> all = page.getJson().jsonPath($_type + ".content").all();
            for (String data : all) {
                JSONObject jsonObject = JSONObject.parseObject(data);
                String keyword = jsonObject.getString("keyword");
                String q = encodeTopic(keyword);
                // FIX: UrlWeibo has four %s placeholders, but the original call
                // supplied only three arguments (params, startTime, endTime),
                // which throws MissingFormatArgumentException at runtime — and
                // ignored the per-keyword `q` entirely. Use the encoded topic
                // for both the path segment and the q parameter.
                // NOTE(review): the retired `params` constant was double-encoded;
                // confirm whether the path segment needs that extra encoding pass.
                String urlWeibo = String.format(UrlWeibo, q, q, startTime, endTime);
                CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(urlWeibo)
                        .releaseTime(System.currentTimeMillis())
                        .recordKey(urlWeibo)
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                requestRecord.getHttpRequest().setCookies(cookieMap);
                prepareLinks.add(requestRecord);
            }
        }
        return prepareLinks;
    }

    /**
     * URL-encodes a keyword as a "#keyword#" hashtag using UTF-8 explicitly
     * (the original used the deprecated single-argument overload, which depends
     * on the platform-default charset and is non-deterministic across hosts).
     */
    private static String encodeTopic(String keyword) {
        try {
            return URLEncoder.encode("#" + keyword + "#", "UTF-8");
        } catch (UnsupportedEncodingException e) {
            // UTF-8 support is mandated by the JVM spec; unreachable.
            throw new IllegalStateException(e);
        }
    }

    /**
     * On success, follows pagination of search-result pages; on failure,
     * re-queues the same URL via {@link #recordAgainDownload} and suppresses
     * washing of the broken page.
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> parseLinks = new ArrayList<>();
        if (page.getStatusCode() != 200 || !page.isDownloadSuccess()) {
            log.error("download error or page != 200  code={}", page.getStatusCode());
            this.recordAgainDownload(crawlerRecord, page, parseLinks);
            crawlerRecord.setNeedWashPage(false);
            return parseLinks;
        }
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (url.matches(weiboUrlRegulars)) {
            this.getWeiboUrlPage(crawlerRecord, page, parseLinks);
        }
        return parseLinks;
    }

    /**
     * Queues the next result page, if any. A first page exposes a single
     * "next" anchor; an already-paged URL (…&page=N) exposes prev/next, where
     * a[2] is "next". A missing anchor means the listing is exhausted.
     */
    private void getWeiboUrlPage(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String pageUrl = page.getHtml().xpath("//*[@id=\"pl_feedlist_index\"]/div[3]/div/a/@href").get();
        if (crawlerRecord.getHttpRequest().getUrl().matches(weiboUrlPageRegulars)) {
            pageUrl = page.getHtml().xpath("//*[@id=\"pl_feedlist_index\"]/div[3]/div/a[2]/@href").get();
        }
        if (pageUrl == null) {
            log.error("The page is gone");
            crawlerRecord.setNeedWashPage(false);
            return;
        }
        // Crude &amp; -> & decode: the href is HTML-escaped in the page source.
        pageUrl = pageUrl.replaceAll("amp;", "");
        pageUrl = weiboUrl + pageUrl;
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(pageUrl)
                .releaseTime(System.currentTimeMillis())
                .recordKey(pageUrl)
                .copyBizTags()
                .copyResultTags()
                .build();
        record.getHttpRequest().setCookies(cookieMap);
        parseLinks.add(record);
    }

    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> dataList = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (url.matches(weiboUrlRegulars)) {
            this.washUrl(crawlerRecord, page, dataList);
        }
        return dataList;
    }

    /**
     * Extracts one article record and one interaction record per feed card on
     * a search result page.
     */
    private void washUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> dataList) {
        List<Selectable> nodes = page.getHtml().xpath("//*[@id=\"pl_feedlist_index\"]/div[2]/div").nodes();
        for (Selectable node : nodes) {
            String mid = node.xpath("./@mid").get();  // Weibo post mid
            String userName = node.xpath("./div/div[1]/div[2]/div[1]/div[2]/a[1]/@nick-name").get();
            String uid = extractUid(node.xpath("./div/div[1]/div[2]/div[1]/div[2]/a/@href").get());
            StringBuilder contents = new StringBuilder();
            for (String s : node.xpath("./div/div[1]/div[2]/p[1]/text()").all()) {
                contents.append(s);
            }

            // Counter cells carry a two-character Chinese label before the number.
            String forwards = stripCountLabel(node.xpath("./div/div[2]/ul/li[2]/a/text()").get());
            String comments = stripCountLabel(node.xpath("./div/div[2]/ul/li[3]/a/text()").get());
            String like = node.xpath("./div/div[2]/ul/li[4]/a/em/text()").get();
            if (StringUtils.isBlank(like)) {
                like = "0";
            }
            // NOTE(review): releaseTime is the crawl time, not the post's publish
            // time — confirm that is intended downstream.
            CrawlerData crawlerArticle = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, mid))
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                    .url(crawlerRecord.getHttpRequest().getUrl())
                    .releaseTime(System.currentTimeMillis())
                    .addContentKV(AICCommonField.Field_Author, userName)
                    .addContentKV(AICCommonField.Field_Author_Id, uid)
                    .addContentKV(AICCommonField.Field_Content, String.valueOf(contents))
                    .build();
            dataList.add(crawlerArticle);
            CrawlerData crawlerInteraction = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.interaction, mid))
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                    .url(crawlerRecord.getHttpRequest().getUrl())
                    .releaseTime(System.currentTimeMillis())
                    .addContentKV(AICCommonField.Field_I_Forwards, forwards)
                    .addContentKV(AICCommonField.Field_I_Comments, comments)
                    .addContentKV(AICCommonField.Field_I_Likes, like)
                    .build();
            dataList.add(crawlerInteraction);
        }
    }

    /**
     * Pulls the uid out of a profile href like "//weibo.com/&lt;uid&gt;?refer=...".
     * The original indexed segment [3] unguarded and could throw
     * ArrayIndexOutOfBoundsException / NullPointerException on malformed cards;
     * those now yield "".
     */
    private static String extractUid(String href) {
        if (href == null) {
            return "";
        }
        String[] segments = href.split("/");
        if (segments.length <= 3) {
            return "";
        }
        return segments[3].split("\\?")[0];
    }

    /**
     * Normalizes a forward/comment counter string: removes spaces, drops the
     * two-character label prefix, and maps blank/missing values to "0".
     * (The original stripped spaces only for forwards and could throw
     * StringIndexOutOfBoundsException on strings shorter than two characters.)
     */
    private static String stripCountLabel(String raw) {
        if (raw == null) {
            return "0";
        }
        String compact = raw.replaceAll(" ", "");
        String count = compact.length() > 2 ? compact.substring(2) : "";
        return StringUtils.isBlank(count) ? "0" : count;
    }

    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        return crawlerRecord.tagsCreator().bizTags().site().equals(site);
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // No post-processing required for this script.
    }

    @Override
    public String domain() {
        return domain;
    }

    /**
     * Checks whether a release timestamp falls within the record's configured
     * date-range filter; records without a date filter always pass.
     * Currently unreferenced within this script.
     *
     * @param crawlerRequestRecord record carrying the filter configuration
     * @param releaseTimeToLong    publish time in epoch millis (0 = unknown, never in range)
     * @return true when in range or no date filter applies
     */
    private boolean isDateRange(CrawlerRequestRecord crawlerRequestRecord, Long releaseTimeToLong) {
        boolean isRange = false;
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        if (filter == CrawlerEnum.CrawlerRecordFilter.keyOrDateRange || filter == CrawlerEnum.CrawlerRecordFilter.dateRange) {
            List<FilterInfo> filterInfos = crawlerRequestRecord.getFilterInfos();
            Long startTime = null;
            Long endTime = null;
            for (FilterInfo filterInfo : filterInfos) {
                if (filterInfo.getFilter() == CrawlerEnum.CrawlerRecordFilter.dateRange) {
                    long[] dateAllowRange = filterInfo.getDateAllowRange();
                    int hourFromNow = filterInfo.getHourFromNow();
                    if (dateAllowRange != null) {
                        startTime = dateAllowRange[0];
                        endTime = dateAllowRange[1];
                    } else if (hourFromNow != 0) {
                        // One minute of slack so "now" endpoints are never in the future.
                        endTime = System.currentTimeMillis() - 60000;
                        startTime = endTime - DateUtils.MILLIS_PER_HOUR * hourFromNow;
                    }
                }
            }
            if (startTime != null && releaseTimeToLong != 0 && startTime <= releaseTimeToLong && releaseTimeToLong <= endTime) {
                isRange = true;
            }
        } else {
            isRange = true;
        }
        return isRange;
    }

    /**
     * Re-queues a failed download, tracking the attempt number in the "count"
     * tag and giving up after 10 attempts.
     */
    private void recordAgainDownload(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String count = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("count");
        int retryCount;
        if (count == null) {
            retryCount = 1;
        } else {
            retryCount = Integer.parseInt(count);
            if (retryCount >= 10) {
                log.error("The number of downloads exceeds the limit");
                return;
            }
            retryCount++;
        }

        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(crawlerRecord.getHttpRequest().getUrl())
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        // Distinct record key per attempt so the retry is not deduplicated away.
        record.setRecordKey(crawlerRecord.getHttpRequest().getUrl() + retryCount);
        // NOTE(review): written via addCustomKV but read back above through
        // getCategoryTag().getKVTagStrVal — confirm both address the same tag store.
        record.tagsCreator().bizTags().addCustomKV("count", retryCount);
        parseLinks.add(record);
    }
}
