package com.chance.cc.crawler.development.scripts.bitauto.yichehao;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpConstant;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.downloader.HttpRequestBody;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.LabelTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.security.NoSuchAlgorithmException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Field_Floor;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.Field_Author_Follows;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.Tag_Field_Series;
import static org.codehaus.groovy.runtime.EncodingGroovyMethods.md5;

/**
 * @Author: ZhaoHhuan on 2021/11/25 15:24
 * @Email: 18638575967@163.com
 * @Description:
 *      Crawler for the revamped Yiche-hao (易车号) creator-article channel: signs and
 *      POSTs the latest-article list API, then emits detail-page requests per article.
**/
public class BitautoHaoCrawlerScript extends CrawlerCommonScript {

    private static final Logger logger = LoggerFactory.getLogger(BitautoHaoCrawlerScript.class);

    private final String domain = "bitauto";
    private final String scriptSite = "hao";

    /** Business KV tag that counts how many times a failed download has been re-queued. */
    private static final String REQUEST_AGAIN_TAG = "request_retry";

    /** Upper bound on re-download attempts for a single record. */
    private static final int MAX_RETRY_COUNT = 5;

    /**
     * Per-platform signing material keyed by the "x-platform" request header:
     * (platform name, secret appended to the signed string, cid sent in the body).
     */
    static final List<HeaderEncryptKey> list = Arrays.asList(
            new HeaderEncryptKey("pc","19DDD1FBDFF065D3A4DA777D2D7A81EC","508"),
            new HeaderEncryptKey("phone","DB2560A6EBC65F37A0484295CD4EDD25","601"),
            new HeaderEncryptKey("h5","745DFB2027E8418384A1F2EF1B54C9F5","601"),
            new HeaderEncryptKey("business_applet","64A1071F6C3C3CC68DABBF5A90669C0A","601"),
            new HeaderEncryptKey("wechat","AF23B0A6EBC65F37A0484395CE4EDD2K","601")
    );

    /** Latest-article list API endpoint (POST, JSON body, signed headers). */
    private final String articleListUrl = "https://hao.yiche.com/site_web/hao/api/get_latest_article_list";

    /** Article detail page URL template; %s is the article id. */
    private final String articleUrlFormat  ="https://news.yiche.com/hao/wenzhang/%s";

    /**
     * Converts a list-page request into a signed POST before download.
     * Requires the "x-platform" header and an extras entry "pageIndex"; when either is
     * missing the record is marked not-downloadable. Builds the JSON param
     * (pageIndex / pageSize / stageTime), computes x-timestamp and x-sign via
     * {@link #getSign}, and attaches a JSON body of {"param":..., "cid":...}.
     *
     * @param requestRecord        the record about to be downloaded
     * @param supportSourceRecords auxiliary source records, passed through to super
     * @return whatever the superclass produces for this record
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        String requestUrl = requestRecord.getHttpRequest().getUrl();
        // Only the list endpoint needs the signed-POST treatment.
        if (requestUrl.matches(articleListUrl)) {
            HttpRequest httpRequest = requestRecord.getHttpRequest();
            httpRequest.setMethod(HttpConstant.Method.POST);

            Map<String, String> headers = httpRequest.getHeaders();
            Map<String, Object> extras = httpRequest.getExtras();
            // FIX: the old code cast extras.get("pageIndex") to String here but to Integer
            // below, so one of the two casts always threw ClassCastException. Normalize once.
            Integer pageIndex = extras == null ? null : toInteger(extras.get("pageIndex"));

            if (headers == null || StringUtils.isBlank(headers.get("x-platform")) || pageIndex == null) {
                logger.error("request head x-platform or extras pageIndex can not is null!");
                requestRecord.setDownload(false);
                requestRecord.setNeedParsedPage(false);
            } else {
                String xPlatform = headers.get("x-platform");
                Map<String, Object> param = new HashMap<>();
                param.put("pageIndex", pageIndex);
                // NOTE(review): pageSize is sent as a String ("10") while pageIndex is numeric;
                // the sample string in main() signs a numeric pageSize — confirm which the API expects.
                Object pageSize = extras.get("pageSize");
                param.put("pageSize", pageSize != null && StringUtils.isNotBlank(String.valueOf(pageSize))
                        ? String.valueOf(pageSize) : "10");
                // First page goes out with an empty stageTime; later pages anchor to "now".
                if (pageIndex == 1) {
                    param.put("stageTime", "");
                } else {
                    SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                    param.put("stageTime", simpleDateFormat.format(new Date(System.currentTimeMillis())));
                }

                try {
                    SignParam sign = getSign(xPlatform, JSONObject.toJSONString(param));
                    if (sign == null) {
                        logger.error("getHeaderEncryptKey can not get");
                        requestRecord.setDownload(false);
                        requestRecord.setNeedParsedPage(false);
                    } else {
                        httpRequest.addHeader("x-timestamp", sign.getCurrentTime());
                        httpRequest.addHeader("x-sign", sign.getSign());

                        Map<String, Object> body = new HashMap<>();
                        body.put("param", param);
                        body.put("cid", sign.getCid());
                        httpRequest.setRequestBody(HttpRequestBody.json(JSONObject.toJSONString(body), "UTF-8"));
                    }
                } catch (Exception e) {
                    // Keep the stack trace; e.getMessage() alone loses it.
                    logger.error("prepareRequest sign failed for [" + requestUrl + "]", e);
                }
            }
        }
        return super.prepareRequest(requestRecord, supportSourceRecords);
    }

    /**
     * Extracts follow-up requests (next list page + article detail pages) from a
     * downloaded page. Failed downloads are re-queued (except HTTP 404) and are NOT
     * parsed further.
     *
     * @param crawlerRecord the record that produced this page
     * @param page          the downloaded page
     * @return new requests to enqueue; never null
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> requestRecordList = new ArrayList<>();

        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        if (!page.isDownloadSuccess() || page.getStatusCode() != 200) {
            logger.error("[{}] download error!status is {},will retry!", requestUrl, page.getStatusCode());
            if (page.getStatusCode() != 404) {
                this.requestAgainCrawlerRecord(requestRecordList, crawlerRecord);
            }
            crawlerRecord.setNeedWashPage(false);
            // FIX: return early. The old code fell through and parsed the failed page,
            // whose "message != success" branch queued a second retry record — and
            // re-queued even 404s that the check above deliberately excludes.
            return requestRecordList;
        }

        if (requestUrl.matches(articleListUrl)) {
            parseListUrl(crawlerRecord, page, requestRecordList);
        }

        return requestRecordList;
    }

    /**
     * Parses one page of the list API response: on "success" emits a turn-page request
     * when data.hasNextPage is "true" plus one detail request per itemList entry;
     * otherwise re-queues the list request.
     *
     * @param crawlerRecord     record that produced the page
     * @param page              the downloaded JSON page
     * @param requestRecordList output list the new requests are appended to
     */
    private void parseListUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> requestRecordList) {
        try {
            Json json = page.getJson();
            String message = json.jsonPath($_type + ".message").get();
            if (!"success".equals(message)) {
                logger.error("[{}] message is [{}]", crawlerRecord.getRecordKey(), message);
                requestAgainCrawlerRecord(requestRecordList, crawlerRecord);
                crawlerRecord.setNeedWashPage(false);
                return;
            }

            String hasNextPage = json.jsonPath($_type + ".data.hasNextPage").get();
            // FIX: constant-first equals — hasNextPage is null when the field is absent.
            if ("true".equals(hasNextPage)) {
                // Turn the page.
                Map<String, Object> extras = page.getRequest().getExtras();
                // FIX: normalize instead of a blind (Integer) cast (see prepareRequest).
                Integer currentIndex = extras == null ? null : toInteger(extras.get("pageIndex"));
                if (currentIndex == null) {
                    logger.error("[{}] cannot turn page: extras pageIndex is missing", crawlerRecord.getRecordKey());
                } else {
                    int pageIndex = currentIndex + 1;
                    String requestUrl = page.getRequest().getUrl();

                    CrawlerRequestRecord turnRecord = CrawlerRequestRecord.builder()
                            .turnPageRequest(crawlerRecord)
                            .httpUrl(requestUrl)
                            .recordKey(requestUrl + pageIndex)
                            .releaseTime(System.currentTimeMillis())
                            .httpHeads(page.getRequest().getHeaders())
                            .copyBizTags()
                            .copyResultTags()
                            .build();
                    // FIX: copy the extras rather than mutating the map still referenced
                    // by the request that produced this page.
                    Map<String, Object> nextExtras = new HashMap<>(extras);
                    nextExtras.put("pageIndex", pageIndex);
                    turnRecord.getHttpRequest().setExtras(nextExtras);
                    requestRecordList.add(turnRecord);
                }
            }

            // Emit one detail-page request per article item.
            List<String> all = json.jsonPath($_type + ".data.itemList").all();
            for (String s : all) {
                JSONObject jsonObject = JSONObject.parseObject(s);
                String id = jsonObject.getString("id");
                String realtime = jsonObject.getString("createTime");
                // createTime arrives ISO-like ("2021-11-25T15:24:00"); any other shape is skipped.
                realtime = StringUtils.isNotBlank(realtime) && realtime.contains("T") ? realtime.replace("T", " ") : "";
                if (StringUtils.isBlank(id) || StringUtils.isBlank(realtime)) {
                    continue;
                }

                String articleUrl = String.format(articleUrlFormat, id);
                long releaseTimeToLong = DateUtils.parseDate(realtime, "yyyy-MM-dd HH:mm:ss").getTime();
                CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRecord)
                        .httpUrl(articleUrl)
                        .releaseTime(releaseTimeToLong)
                        .copyBizTags()
                        .resultLabelTag(article)
                        .resultLabelTag(interaction)
                        .build();
                itemRecord.tagsCreator().bizTags().addSite("hao_article");
                requestRecordList.add(itemRecord);
            }
        } catch (Exception e) {
            // FIX: pass the Throwable so the stack trace is logged.
            logger.error("parseListUrl failed for [" + crawlerRecord.getRecordKey() + "]", e);
        }

    }

    /**
     * Washing is not handled by this script.
     * NOTE(review): returns null as in the original — confirm the framework treats a
     * null result as "nothing to wash" rather than an error.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        return null;
    }

    /** Registers the URL patterns this script handles. */
    @Override
    public void initUrlRegulars() {
        addUrlRegular(articleListUrl);
    }

    /**
     * @return true when the record's business site tag matches this script's site.
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        String site = crawlerRecord.tagsCreator().bizTags().site();
        // FIX: constant-first equals — site() may be null for untagged records.
        return scriptSite.equals(site);
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // No post-processing required.
    }

    @Override
    public String domain() {
        return domain;
    }

    /**
     * Re-queues a failed record, tracking attempts in the {@code request_retry} KV tag
     * and giving up after {@link #MAX_RETRY_COUNT} tries. Turn-page records are rebuilt
     * as turn-page requests, everything else as item-page requests; headers and extras
     * are carried over.
     *
     * @param crawlerRequestRecords output list the retry record is appended to
     * @param crawlerRecord         the record whose download failed
     */
    private void requestAgainCrawlerRecord(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        int count = 0;
        if (crawlerBusinessTags.hasKVTag(REQUEST_AGAIN_TAG)) {
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag(REQUEST_AGAIN_TAG).getVal();
            if (count >= MAX_RETRY_COUNT) {
                logger.error("download page the number of retries exceeds the limit" +
                        ",request url {},detail is [{}]", crawlerRecord.getHttpRequest().getUrl(), JSONObject.toJSONString(crawlerRecord));
                return;
            }
        }

        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        CrawlerRequestRecord crawlerRequestRecord;
        // The presence of the turn_page_item_request label decides which builder flavor to use.
        LabelTag type = crawlerRecord.tagsCreator().requestTags().getCategoryTag().getLabelTags().get("turn_page_item_request");
        if (type == null) {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        } else {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        }

        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.getHttpRequest().setHeaders(crawlerRecord.getHttpRequest().getHeaders());
        crawlerRequestRecord.getHttpRequest().setExtras(crawlerRecord.getHttpRequest().getExtras());
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(REQUEST_AGAIN_TAG, ++count);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    /**
     * Builds the x-sign value: md5("cid=" + cid + "&param=" + paramJson + secret + millis).
     *
     * @param xPlatform platform name matching an entry in {@link #list}
     * @param param     JSON-serialized request param
     * @return the sign plus the timestamp/cid used, or null when the platform is unknown
     * @throws NoSuchAlgorithmException if MD5 is unavailable
     */
    private SignParam getSign(String xPlatform, String param) throws NoSuchAlgorithmException {
        HeaderEncryptKey headerEncryptKey = getHeaderEncryptKey(xPlatform);
        if (headerEncryptKey == null) {
            return null;
        }
        String cid = headerEncryptKey.getCid();
        String value = headerEncryptKey.getValue();

        String currentTime = String.valueOf(System.currentTimeMillis());
        String params = "cid=" + cid + "&param=" + param + value + currentTime;

        return new SignParam(md5(params), currentTime, cid);
    }

    /**
     * @return the signing key for the given x-platform name, or null (logged) if unknown.
     */
    private HeaderEncryptKey getHeaderEncryptKey(String xPlatform) {
        for (HeaderEncryptKey headerEncryptKey : list) {
            if (headerEncryptKey.getName().equals(xPlatform)) {
                return headerEncryptKey;
            }
        }

        logger.error("{} can`t get HeaderEncryptKey", xPlatform);
        return null;
    }

    /**
     * Best-effort conversion of an extras value to Integer.
     *
     * @param value an Integer, a numeric String, or null
     * @return the int value, or null when absent or unparseable
     */
    private static Integer toInteger(Object value) {
        if (value instanceof Integer) {
            return (Integer) value;
        }
        if (value == null) {
            return null;
        }
        try {
            return Integer.parseInt(String.valueOf(value).trim());
        } catch (NumberFormatException e) {
            return null;
        }
    }

    /** Immutable holder for a computed signature and the inputs echoed to the server. */
    private static class SignParam {
        private final String sign;
        private final String currentTime;
        private final String cid;

        SignParam(String sign, String currentTime, String cid) {
            this.sign = sign;
            this.currentTime = currentTime;
            this.cid = cid;
        }

        public String getSign() {
            return this.sign;
        }

        public String getCid() {
            return this.cid;
        }

        public String getCurrentTime() {
            return this.currentTime;
        }
    }

    /** Immutable (platform name, signing secret, cid) triple. */
    private static class HeaderEncryptKey {
        private final String name;
        private final String value;
        private final String cid;

        HeaderEncryptKey(String name, String value, String cid) {
            this.name = name;
            this.value = value;
            this.cid = cid;
        }

        public String getName() {
            return this.name;
        }

        public String getValue() {
            return this.value;
        }

        public String getCid() {
            return this.cid;
        }
    }

    /** Ad-hoc signing demo against a captured request string; not used by the crawler. */
    public static void main(String[] args) {
        String s = "cid=601&param={\"pageIndex\":2,\"pageSize\":10,\"stageTime\":\"2021-11-25 16:06:15\"}DB2560A6EBC65F37A0484295CD4EDD251637827629773";
        try {
            System.out.println(md5(s));
        } catch (NoSuchAlgorithmException e) {
            e.printStackTrace();
        }
    }
}
