package com.chance.cc.crawler.development.scripts.weibo.subscribe;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpConstant;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.core.tags.crawler.CrawlerResultTags;
import com.chance.cc.crawler.meta.core.bean.crawler.CrawlerDomainUrls;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.SimpleDateFormat;
import java.util.*;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.article;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.interaction;

/**
 * @Author Zhao.Hhuan
 * @Date Create in 2021/4/12 16:59
 * @Description 获取订阅用户的信息
 **/
public class WeiboApiSubscribeUserIdCrawlerScript extends CrawlerCommonScript {

    private static Logger log = LoggerFactory.getLogger(WeiboApiSubscribeUserIdCrawlerScript.class);

    private static final String DOMAIN = "weibo";
    private static final String SITE = "subscribeUserId";
    private static final String REQUEST_AGAIN_TAG = DOMAIN + "_api_request_retry";
    private static final String SOURCE = "source";
    private static final String SUBID = "subid";
    private static final String DOMAIN_RESULT_JSON_RECORD_TAG = "domain_result_json";//初始record结果字段
    private static final String access_token = "2.00o4_w1HrAaeYBedf38e38b8SnITmD";

    private static final String ENTRANCE_URL = "https://c.api.weibo.com/commercial/push";
    private static final String SUBSCRIBE_URL = "https://c.api.weibo.com/commercial/push\\S+";
    private static final String ENTRANCE_INTERACTION_URL = "https://c.api.weibo.com/2/statuses/count/biz.json";
    private static final String INTERACTION_SOURCE_URL = "https://c.api.weibo.com/2/statuses/count/biz.json?access_token=" + access_token + "&ids=%s";
    private static final String INTERACTION_URL = "https://c.api.weibo.com/2/statuses/count/biz.json\\S+";

    /**
     * Crawler domain identifier for this script.
     *
     * @return the constant domain name ("weibo")
     */
    @Override
    public String domain() {
        return DOMAIN;
    }

    /**
     * Registers the URL patterns that route requests into this script:
     * the two entrance URLs plus the regex forms matching their paged variants.
     */
    @Override
    public void initUrlRegulars() {
        addUrlRegular(ENTRANCE_URL);
        addUrlRegular(SUBSCRIBE_URL);
        addUrlRegular(ENTRANCE_INTERACTION_URL);
        addUrlRegular(INTERACTION_URL);
    }

    /**
     * Builds the initial request(s) for a crawl cycle.
     *
     * <p>Subscribe entrance: requires "source" and "subid" extras. If a persisted
     * domain_result_json biz tag is present, its stored lastId is restored as the
     * "since_id" extra (resume cursor written earlier by washArticle), then every
     * extra is appended to the entrance URL as a query parameter.
     *
     * <p>Interaction entrance: requires a "source" extra. When the
     * domain_result_json tag carries a mid and release time, one interaction-count
     * request is emitted, provided the release time passes the date-range filter.
     *
     * @param requestRecord        seed record carrying extras and biz tags
     * @param supportSourceRecords not used by this script
     * @return the records to enqueue; empty when required extras are missing
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> crawlerRecords = new ArrayList<>();
        String requestUrl = requestRecord.getHttpRequest().getUrl();

        // Assemble the start URL and produce the initial record
        if (requestUrl.matches(ENTRANCE_URL)) {
            Map<String, Object> extras = requestRecord.getHttpRequest().getExtras();
            if (extras == null || !extras.containsKey(SOURCE) || !extras.containsKey(SUBID)) {
                log.error("extras message is error!");
                return crawlerRecords;
            }

            // Resume support: the persisted result JSON's "url" field is itself a JSON
            // document holding the last crawled id; consume it and drop the tag so it
            // is not applied twice.
            if (requestRecord.tagsCreator().bizTags().hasKVTag(DOMAIN_RESULT_JSON_RECORD_TAG)) {
                KVTag domainResultJson = requestRecord.tagsCreator().bizTags().getCategoryTag().getKVTag(DOMAIN_RESULT_JSON_RECORD_TAG);
                CrawlerDomainUrls crawlerDomainUrls = JSON.parseObject(String.valueOf(domainResultJson.getVal()), CrawlerDomainUrls.class);
                String url = crawlerDomainUrls.getUrl();
                Json urlJson = new Json(url);
                String lastId = urlJson.jsonPath($_type + ".lastId").get();
                extras.put("since_id", lastId);
                requestRecord.tagsCreator().bizTags().getCategoryTag().getKvTags().remove(DOMAIN_RESULT_JSON_RECORD_TAG);
            }

            // Append every extra as a query parameter.
            // NOTE(review): values are not URL-encoded — assumed to be plain tokens; confirm.
            Set<String> set = extras.keySet();
            String url = ENTRANCE_URL + "?";
            for (String key : set) {
                url = url + key + "=" + extras.get(key) + "&";
            }
            url = url.substring(0, url.length() - 1); // strip the trailing '&'
            CrawlerRequestRecord startRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(requestRecord)
                    .httpUrl(url)
                    .releaseTime(System.currentTimeMillis())
                    .notFilterRecord()
                    .copyBizTags()
                    .needWashed(true)
                    .copyResultTags()
                    .build();
            // Carry the source through so downstream steps can read it from extras.
            startRecord.getHttpRequest().addExtra(SOURCE, (String) extras.get(SOURCE));
            crawlerRecords.add(startRecord);
        }

        if (requestUrl.matches(ENTRANCE_INTERACTION_URL)) {
            Map<String, Object> extras = requestRecord.getHttpRequest().getExtras();
            if (extras == null || !extras.containsKey(SOURCE)) {
                log.error("extras message is error!");
                return crawlerRecords;
            }


            // The persisted result JSON supplies the mid and release time of a post
            // whose interaction counts should be (re-)fetched.
            if (requestRecord.tagsCreator().bizTags().hasKVTag(DOMAIN_RESULT_JSON_RECORD_TAG)) {
                KVTag domainResultJson = requestRecord.tagsCreator().bizTags().getCategoryTag().getKVTag(DOMAIN_RESULT_JSON_RECORD_TAG);
                CrawlerDomainUrls crawlerDomainUrls = JSON.parseObject(String.valueOf(domainResultJson.getVal()), CrawlerDomainUrls.class);
                String url = crawlerDomainUrls.getUrl();
                Json urlJson = new Json(url);
                String mid = urlJson.jsonPath($_type + ".mid").get();
                long releaseTimeToLong = Long.parseLong(urlJson.jsonPath($_type + ".releaseTimeToLong").get());
                // Posts outside the configured date window are skipped entirely.
                if (!isDateRange(requestRecord, releaseTimeToLong)) {
                    return crawlerRecords;
                }
                crawlerRecords.add(getInteractionRecord(requestRecord,mid,releaseTimeToLong));
            }
        }
        return crawlerRecords;
    }

    /**
     * Checks whether a release time falls inside the record's configured
     * date-range filter. Records without a date-range filter always pass.
     *
     * @param crawlerRequestRecord record carrying the filter configuration
     * @param releaseTimeToLong    release time as epoch milliseconds
     * @return true when no date filter applies, or the time is within range
     */
    private boolean isDateRange(CrawlerRequestRecord crawlerRequestRecord, Long releaseTimeToLong) {
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        boolean dateFiltered = filter == CrawlerEnum.CrawlerRecordFilter.keyOrDateRange
                || filter == CrawlerEnum.CrawlerRecordFilter.dateRange;
        if (!dateFiltered) {
            return true;
        }

        // Derive the allowed window; when several dateRange filters exist, the
        // last one visited wins (same as iterating and overwriting).
        Long rangeStart = null;
        Long rangeEnd = null;
        for (FilterInfo info : crawlerRequestRecord.getFilterInfos()) {
            if (info.getFilter() != CrawlerEnum.CrawlerRecordFilter.dateRange) {
                continue;
            }
            long[] allowRange = info.getDateAllowRange();
            if (allowRange != null) {
                rangeStart = allowRange[0];
                rangeEnd = allowRange[1];
            } else if (info.getHourFromNow() != 0) {
                rangeEnd = System.currentTimeMillis();
                rangeStart = rangeEnd - DateUtils.MILLIS_PER_HOUR * info.getHourFromNow();
            }
        }

        return rangeStart != null && releaseTimeToLong != 0
                && rangeStart <= releaseTimeToLong && releaseTimeToLong <= rangeEnd;
    }

    /**
     * Input gate: only records whose business "site" KV tag starts with the
     * subscribe-site prefix are processed by this script.
     *
     * @param crawlerRequestRecord incoming record to check
     * @return true when the record's site tag matches this script
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        // Guard against a missing "site" tag — the original dereferenced it
        // unconditionally and would throw NPE instead of rejecting the record.
        return site != null && site.startsWith(SITE);
    }

    /**
     * Link-extraction entry point: re-enqueues failed downloads, and for
     * subscribe pages derives the follow-up interaction requests.
     *
     * @param crawlerRequestRecord the record whose page was downloaded
     * @param httpPage             the downloaded page
     * @return follow-up request records (possibly empty)
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> links = new ArrayList<>();
        String pageUrl = crawlerRequestRecord.getHttpRequest().getUrl();
        int statusCode = httpPage.getStatusCode();

        // Anything other than a successful 200/404 download is retried and the
        // (useless) page body is excluded from washing.
        boolean acceptable = httpPage.isDownloadSuccess() && (statusCode == 200 || statusCode == 404);
        if (!acceptable) {
            log.error("{} status code ：[{}]", pageUrl, statusCode);
            requestAgainCrawlerRecord(links, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return links;
        }

        if (pageUrl.matches(SUBSCRIBE_URL)) {
            subscribeUrlRecord(crawlerRequestRecord, httpPage, links);
        }
        return links;
    }

    /**
     * Parses the line-delimited subscribe payload and, when the record carries an
     * interaction result tag, emits one interaction-count request per status line.
     * The "interaction" label is consumed (removed) from this record so the raw
     * page itself is not washed as interaction data.
     *
     * @param crawlerRequestRecord record whose page was downloaded
     * @param httpPage             downloaded page; one JSON document per \r\n line
     * @param parsedLinks          output list the new requests are appended to
     */
    private void subscribeUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        CrawlerResultTags resultTags = crawlerRequestRecord.tagsCreator().resultTags();
        if (!resultTags.hasDataType(interaction)) {
            return;
        }
        resultTags.getCategoryTag().removeLabelTag("interaction");

        for (String data : httpPage.getRawText().split("\\r\\n")) {
            try {
                // Parse each line once instead of twice (the original built two Json objects).
                Json json = new Json(data);
                String mid = json.jsonPath($_type + ".text.status.mid").get();
                String releaseTime = json.jsonPath($_type + ".text.status.created_at").get();
                parsedLinks.add(getInteractionRecord(crawlerRequestRecord, mid, parseTime(releaseTime)));
            } catch (Exception e) {
                // Keep the exception so malformed lines are diagnosable (was message-only).
                log.error("mid or releaseTime is not get! detail is [{}]", JSONObject.toJSONString(data), e);
            }
        }
    }

    /**
     * Builds a single interaction-count request for the given status mid.
     *
     * @param crawlerRequestRecord parent record the new request derives from
     * @param mid                  Weibo status id to query counts for
     * @param releaseTimeToLong    release time (epoch ms) carried on the record
     * @return the interaction request record
     */
    private CrawlerRequestRecord getInteractionRecord(CrawlerRequestRecord crawlerRequestRecord, String mid, long releaseTimeToLong) {
        return CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .httpUrl(String.format(INTERACTION_SOURCE_URL, mid))
                .releaseTime(releaseTimeToLong)
                .copyBizTags()
                .resultLabelTag(interaction)
                .build();
    }


    /**
     * Dispatches page washing to the handlers matching the record's result
     * data types (article and/or interaction).
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> results = new ArrayList<>();
        CrawlerResultTags resultTags = crawlerRecord.tagsCreator().resultTags();

        if (resultTags.hasDataType(article)) {
            results.addAll(washArticle(crawlerRecord, page));
        }
        if (resultTags.hasDataType(interaction)) {
            results.addAll(washInteraction(crawlerRecord, page));
        }
        return results;
    }

    /**
     * Washes the line-delimited subscribe payload into article data.
     *
     * <p>Per status line it emits: (1) the raw article document to the kafka
     * pipeline, and (2) when a release time is present, a small record holding
     * mid + release time to the mysql_post pipeline (read back later via the
     * domain_result_json tag — presumably to drive interaction refreshes).
     * Finally the "id" of the last line is persisted via the mysql pipeline as
     * the resume cursor consumed by prepareRequest as since_id.
     *
     * @param requestRecord record whose page was downloaded
     * @param httpPage      downloaded page; one JSON document per \r\n line
     * @return the washed data items
     */
    private List<CrawlerData> washArticle(CrawlerRequestRecord requestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        String itemUrl = httpPage.getRequest().getUrl();
        String site = requestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");

        String[] split = httpPage.getRawText().split("\\r\\n");
        for (String data : split) {
            try {
                // Parse each line once (the original built two Json objects per line).
                Json json = new Json(data);
                String mid = json.jsonPath($_type + ".text.status.mid").get();
                String releaseTime = json.jsonPath($_type + ".text.status.created_at").get();
                Long releseTimeToLong = parseTime(releaseTime);

                // Raw article document -> kafka pipeline.
                CrawlerData crawlerData = CrawlerData.builder()
                        .data(requestRecord, httpPage)
                        .dataId(StringUtils.joinWith("-", requestRecord.getDomain(), site, article.enumVal(), mid))
                        .resultLabelTag(article)
                        .releaseTime(releseTimeToLong)
                        .url(itemUrl)
                        .content(data)
                        .flowInPipelineTag("kafka")
                        .build();
                crawlerData.setFilterPipelineResult(true);
                crawlerDataList.add(crawlerData);

                // Companion "post" record (mid + release time) -> mysql_post pipeline.
                if (releaseTime != null) {
                    CrawlerData crawlerDataPost = CrawlerData.builder()
                            .data(requestRecord, httpPage)
                            .dataId(StringUtils.joinWith("-", requestRecord.getDomain(), site, "post", mid))
                            .url(itemUrl)
                            .addContentKV("mid", mid)
                            .addContentKV("releaseTimeToLong", String.valueOf(releseTimeToLong))
                            .flowInPipelineTag("mysql_post")
                            .build();
                    crawlerDataPost.setFilterPipelineResult(true);
                    crawlerDataList.add(crawlerDataPost);
                }
            } catch (Exception e) {
                // Keep the exception and the offending line; the original logged
                // only e.getMessage() and lost the stack trace.
                log.error("wash article line failed, detail is [{}]", JSONObject.toJSONString(data), e);
            }
        }

        // Persist the last line's id as the resume cursor. Guard the tail parse:
        // previously an empty/malformed last line threw and aborted the whole wash,
        // discarding every article collected above.
        try {
            String lastId = JSONObject.parseObject(split[split.length - 1]).getString("id");
            CrawlerData crawlerDataLast = CrawlerData.builder()
                    .data(requestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", requestRecord.getDomain(), site, article.enumVal()))
                    .url(itemUrl)
                    .addContentKV("lastId", lastId)
                    .flowInPipelineTag("mysql")
                    .build();
            crawlerDataLast.setFilterPipelineResult(true);
            crawlerDataList.add(crawlerDataLast);
        } catch (Exception e) {
            log.error("lastId is not get from [{}]", itemUrl, e);
        }

        return crawlerDataList;
    }

    /**
     * Washes the interaction-count response (a JSON array with one object per
     * status) into interaction data linked to the parent article via parentId.
     *
     * @param requestRecord record whose page was downloaded
     * @param httpPage      downloaded page containing the JSON array body
     * @return the washed interaction items
     */
    private List<CrawlerData> washInteraction(CrawlerRequestRecord requestRecord, HttpPage httpPage) {
        List<CrawlerData> results = new ArrayList<>();
        String itemUrl = httpPage.getRequest().getUrl();
        String site = requestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        String domainName = requestRecord.getDomain();

        for (Object element : JSONArray.parseArray(httpPage.getRawText())) {
            JSONObject entry = (JSONObject) element;
            String mid = entry.getString("id");
            CrawlerData item = CrawlerData.builder()
                    .data(requestRecord, httpPage)
                    .parentId(StringUtils.joinWith("-", domainName, site, article.enumVal(), mid))
                    .dataId(StringUtils.joinWith("-", domainName, site, interaction.enumVal(), mid))
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("interaction"))
                    .releaseTime(requestRecord.getReleaseTime())
                    .url(itemUrl)
                    .content(JSONObject.toJSONString(entry))
                    .flowInPipelineTag("kafka")
                    .build();
            item.setFilterPipelineResult(true);
            results.add(item);
        }
        return results;
    }

    /**
     * Re-enqueues a failed download, up to 5 attempts. The attempt count is kept
     * in a business KV tag; the recordKey is suffixed with the attempt number so
     * each retry is a distinct record. POST method/body and extras are carried over.
     *
     * @param crawlerRequestRecords output list the retry record is appended to
     * @param crawlerRecord         the record whose download failed
     */
    private void requestAgainCrawlerRecord(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {

        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        int count = 0;
        if (crawlerBusinessTags.hasKVTag(REQUEST_AGAIN_TAG)) {
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag(REQUEST_AGAIN_TAG).getVal();
            if (count >= 5) {
                // Fixed: the original statement had two {} placeholders but supplied
                // only one argument, and its text was copy-pasted from the
                // "search keyword" script.
                log.error("Weibo subscribe download retries exceed the limit, request url {}",
                        crawlerRecord.getHttpRequest().getUrl());
                return;
            }
        }

        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(crawlerRecord.getHttpRequest().getUrl())
                .recordKey(crawlerRecord.getRecordKey() + count)
                .releaseTime(System.currentTimeMillis())
                .notFilterRecord()
                .copyBizTags()
                .copyResultTags()
                .build();

        // A POST retry must replay the original method and body.
        String method = crawlerRecord.getHttpRequest().getMethod();
        if (HttpConstant.Method.POST.equals(method)) {
            HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
            httpRequest.setMethod(method);
            httpRequest.setRequestBody(crawlerRecord.getHttpRequest().getRequestBody());
        }

        crawlerRequestRecord.getHttpRequest().setExtras(crawlerRecord.getHttpRequest().getExtras());
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(REQUEST_AGAIN_TAG, ++count);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    /**
     * Converts a Weibo created_at timestamp such as
     * "Wed Jul 14 18:30:03 +0800 2021" to epoch milliseconds.
     *
     * <p>Fix: the original pattern hard-coded "+0800" as literal text, so the
     * time was interpreted in the JVM's default time zone (wrong epoch on any
     * host not running in +08:00) and strings with a different offset failed to
     * parse entirely. The RFC-822 zone specifier {@code Z} honors the embedded
     * offset and makes the result host-timezone independent.
     *
     * @param dateString timestamp text; "GMT" and parenthesised zone names are stripped first
     * @return epoch milliseconds, or 0L when the string cannot be parsed
     */
    private static Long parseTime(String dateString) {
        dateString = dateString.replace("GMT", "").replaceAll("\\(.*\\)", "");
        // SimpleDateFormat is not thread-safe; a fresh instance per call keeps this safe.
        SimpleDateFormat format = new SimpleDateFormat("EEE MMM dd HH:mm:ss Z yyyy", Locale.ENGLISH);
        try {
            return format.parse(dateString).getTime();
        } catch (Exception e) {
            // Keep the exception (was message-only) so bad inputs are diagnosable.
            log.error("unparseable date [{}]", dateString, e);
        }
        return 0L;
    }

    /**
     * Extracts the distinct mids from a line-delimited subscribe payload,
     * preserving first-seen order.
     *
     * @param httpPage downloaded page; one JSON document per \r\n line
     * @return distinct mids in first-occurrence order
     */
    private static List<String> getMidList(HttpPage httpPage) {
        // LinkedHashSet keeps insertion order while replacing the original
        // O(n^2) List.contains de-duplication scan.
        Set<String> mids = new LinkedHashSet<>();
        for (String data : httpPage.getRawText().split("\\r\\n")) {
            JSONObject jsonObject = JSONObject.parseObject(data);
            mids.add(jsonObject.getJSONObject("text").getJSONObject("status").getString("mid"));
        }
        return new ArrayList<>(mids);
    }
    /**
     * Batches mids into groups of up to 100 and builds one interaction-count
     * request per batch (the count API takes a comma-separated ids parameter).
     *
     * @param crawlerRequestRecord parent record the new requests derive from
     * @param midList              mids to query; may be empty
     * @return one request record per batch of at most 100 mids
     */
    private List<CrawlerRequestRecord> getInteractionRecordList(CrawlerRequestRecord crawlerRequestRecord, List<String> midList) {
        List<CrawlerRequestRecord> crawlerRequestRecordList = new ArrayList<>();
        final int batchSize = 100;
        for (int from = 0; from < midList.size(); from += batchSize) {
            // Clamp the upper bound so the final partial batch is included
            // (the original mutated the step variable instead; raw type fixed too).
            int to = Math.min(from + batchSize, midList.size());
            List<String> batch = midList.subList(from, to);
            String midString = StringUtils.join(batch, ",");
            String url = String.format(INTERACTION_SOURCE_URL, midString);
            CrawlerRequestRecord interactionRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRequestRecord)
                    .httpUrl(url)
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .notFilterRecord()
                    .copyBizTags()
                    .resultLabelTag(interaction)
                    .build();
            crawlerRequestRecordList.add(interactionRecord);
        }
        return crawlerRequestRecordList;
    }

    /**
     * Post-execution hook required by the base class; intentionally a no-op
     * for this script.
     */
    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    /**
     * Ad-hoc manual check for {@link #parseTime(String)}: prints the epoch
     * milliseconds for a sample Weibo created_at string.
     */
    public static void main(String[] args) {
        String sample = "Wed Jul 14 18:30:03 +0800 2021";
        System.out.println(parseTime(sample));
    }
}
