package com.chance.cc.crawler.development.scripts.weixin.api.api;

import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequestBody;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.ParseException;
import java.util.*;

/**
 * Crawler script that pulls WeChat (WeiXin) official-account article content
 * from the NewRank sync API ({@code articles_content} endpoint).
 *
 * @author songding
 * @date 2021/11/11
 * @version 1.0
 **/
public class WeiXinApiAccount extends CrawlerCommonScript {
    private static final Logger log = LoggerFactory.getLogger(WeiXinApiAccount.class);
    private static final String domain = "weixin";
    private static final String site = "apiAccount";

    /** NewRank endpoint that returns article content for a single account. */
    private static final String accountRegulars = "https://api.newrank.cn/api/sync/weixin/account/articles_content";

    /** Lock used to serialize the rate-limit back-off sleep across worker threads. */
    private static final Object obj = new Object();
    // API credential sent as an HTTP header: header name and secret value.
    private static final String key = "key";
    private static final String value = "vf94a4bdb7b0e49bba2e5z6jt";

    @Override
    public void initUrlRegulars() {
        addUrlRegular(accountRegulars);
    }

    /**
     * Builds one POST request per account keyword found in the "keys" support
     * records. The query window ({@code startTime}/{@code endTime}) and page
     * size are copied from the seed record's business tags.
     *
     * @param crawlerRequestRecord seed record carrying the startTime/endTime/pageSize tags
     * @param supportSourceRecords already-downloaded support pages; may be null
     * @return prepared per-account requests (empty when nothing to do)
     */
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> prepareLinks = new ArrayList<>();
        if (supportSourceRecords == null) {
            return prepareLinks;
        }
        String startTime = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("startTime");
        String endTime = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("endTime");
        String pageSize = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("pageSize");
        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
            String urlSupport = supportSourceRecord.getHttpRequest().getUrl();
            if (!urlSupport.contains("keys")) {
                continue;
            }
            HttpPage page = supportSourceRecord.getInternalDownloadPage();
            String msg = page.getJson().jsonPath($_type + ".msg").get();
            // Constant-first comparison: msg may be null when the path is absent.
            if (!"success".equals(msg)) {
                continue;
            }
            List<String> all = page.getJson().jsonPath($_type + ".content").all();
            for (String data : all) {
                JSONObject jsonObject = JSONObject.parseObject(data);
                String keyword = jsonObject.getString("keyword");
                prepareLinks.add(buildAccountRequest(crawlerRequestRecord, keyword, startTime, endTime, pageSize));
            }
        }
        return prepareLinks;
    }

    /**
     * Maps the API's numeric status codes to crawl decisions: 0 is success,
     * 10006 (rate limited) backs off and queues a retry, every other known
     * code is terminal and only suppresses washing of the page.
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> list = new ArrayList<>();
        if (page.getStatusCode() == 0) {
            log.info("success");
            return list;
        }
        if (page.getStatusCode() == 10006) {
            log.error("请求频率过高");
            crawlerRecord.setNeedWashPage(false);
            // Serialize the back-off so concurrent workers don't hammer the API.
            synchronized (obj) {
                try {
                    Thread.sleep(3000);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag instead of swallowing it.
                    Thread.currentThread().interrupt();
                }
            }
            recordAgainDownload(crawlerRecord, page, list);
            return list;
        }
        String error = statusError(page.getStatusCode());
        if (error != null) {
            log.error(error);
            crawlerRecord.setNeedWashPage(false);
        }
        // Unknown codes fall through unchanged, matching the original behavior.
        return list;
    }

    /**
     * Returns the log message for a terminal API error code, or null when the
     * code is not a known terminal error.
     */
    private static String statusError(long statusCode) {
        switch ((int) statusCode) {
            case 10001: return "缺少必要参数";
            case 10002: return "参数错误";
            case 10004: return "密钥有误";
            case 10012: return "日期区间有误";
            case 10016: return "定制账号列表不存在此账号";
            case 10026: return "日期格式错误";
            case 10050: return "服务器异常";
            default:    return null;
        }
    }

    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> dataList = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        // The endpoint is a fixed URL, not a regex; String.matches would have
        // treated its '.' characters as wildcards.
        if (accountRegulars.equals(url)) {
            washAccount(crawlerRecord, page, dataList);
        }
        return dataList;
    }

    /**
     * Converts each article object in the response's "data" array into a
     * CrawlerData item, keyed by "mid" and timestamped from "publicTime".
     */
    private void washAccount(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> dataList) {
        List<String> all = page.getJson().jsonPath($_type + ".data").all();
        for (String str : all) {
            JSONObject jsonObject = JSONObject.parseObject(str);
            String publicTime = jsonObject.getString("publicTime");
            String mid = jsonObject.getString("mid");
            long time = 0L;
            try {
                time = DateUtils.parseDate(publicTime, "yyyy-MM-dd HH:mm:ss").getTime();
            } catch (ParseException e) {
                // Keep the zero timestamp but surface the bad value in the log.
                log.warn("unparseable publicTime: {}", publicTime, e);
            }
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, mid))
                    .url(crawlerRecord.getHttpRequest().getUrl())
                    .releaseTime(time)
                    .content(str)
                    .build();
            dataList.add(crawlerData);
        }
    }

    /**
     * Queues an identical retry request after a rate-limit response; all
     * request parameters are re-read from the failed record's business tags.
     */
    private void recordAgainDownload(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String account = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("account");
        String startTime = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("startTime");
        String endTime = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("endTime");
        String pageSize = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("pageSize");
        // buildAccountRequest also sets the POST method, which the original
        // retry path omitted (the retry went out with the default method).
        parseLinks.add(buildAccountRequest(crawlerRecord, account, startTime, endTime, pageSize));
    }

    /**
     * Builds a single POST request against the articles_content endpoint.
     *
     * @param parent    record the new request inherits its item-page link and tags from
     * @param account   account keyword to query
     * @param startTime inclusive start of the time window (form field "from")
     * @param endTime   inclusive end of the time window (form field "to")
     * @param pageSize  page size forwarded to the API
     * @return a fully-populated POST request record
     */
    private CrawlerRequestRecord buildAccountRequest(CrawlerRequestRecord parent, String account,
                                                     String startTime, String endTime, String pageSize) {
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(parent)
                .httpUrl(accountRegulars)
                .releaseTime(System.currentTimeMillis())
                .httpHead(key, value)
                .copyBizTags()
                .copyResultTags()
                .build();
        Map<String, Object> bodyMap = new HashMap<>();
        bodyMap.put("account", account);
        bodyMap.put("from", startTime);
        bodyMap.put("to", endTime);
        bodyMap.put("pageSize", pageSize);
        record.tagsCreator().bizTags().addCustomKV("account", account);
        record.getHttpRequest().setRequestBody(HttpRequestBody.form(bodyMap, "utf-8"));
        record.getHttpRequest().setMethod("post");
        return record;
    }

    /** No post-processing is needed for this script. */
    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    @Override
    public String domain() {
        return domain;
    }

    /** Accepts only records whose "site" business tag matches this script. */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        return site.equals(crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site"));
    }
}
