package com.chance.cc.crawler.development.scripts.bitauto.yichehao;

import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.downloader.HttpRequestBody;
import com.chance.cc.crawler.core.downloader.http.HttpClientDownloader;
import com.chance.cc.crawler.core.downloader.http.HttpClientGenerator;
import com.chance.cc.crawler.core.downloader.proxy.SiteConfigProxyProvider;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.tags.KVTag;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;

/**
 * @ClassName BitautoYCHaoArticleCrawlerScript
 * @Description Crawler script for Yiche Hao (易车号) articles: pages through the
 * latest-article list API with signed POST requests and emits article detail links.
 * @Author songding
 * @Date 2021/11/25 12:14
 * @Version 1.0
 **/
public class BitautoYCHaoArticleCrawlerScript extends CrawlerCommonScript {

    private static final Logger log = LoggerFactory.getLogger(BitautoYCHaoArticleCrawlerScript.class);

    /** Site key this script handles; checked in {@link #crawlerCheck}. */
    private static final String site = "YCHao_article";
    private static final String domain = "bitauto";
    /** Value of the {@code x-platform} request header expected by the list API. */
    private static final String x_platform = "phone";
    private static final String content_type = "application/json;charset=UTF-8";
    /** Biz-tag key tracking how many times a failed request has been re-queued. */
    private static final String RECORD_AGAIN_REQUEST = "record_again_request";
    /** Maximum number of re-download attempts for a failed request. */
    private static final int MAX_RETRY = 10;

    /** Entry (home) page. */
    private static final String bitUrl = "https://hao.yiche.com/";
    /** Paginated latest-article list API (signed POST). */
    private static final String listUrl = "https://hao.yiche.com/site_web/hao/api/get_latest_article_list";
    /** Prefix for article detail pages; the item id is appended. */
    private static final String articleUrl = "https://news.yiche.com/hao/wenzhang/";

    private static final String articleRegulars = "https://news.yiche.com/hao/wenzhang/\\S*";

    @Override
    public void initUrlRegulars() {
        addUrlRegular(bitUrl);
        addUrlRegular(listUrl);
        addUrlRegular(articleRegulars);
    }

    /**
     * Dispatch a downloaded page to the matching parser and return the follow-up requests.
     * Failed downloads are re-queued unless the status is 404 (permanent failure).
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> list = new ArrayList<>();
        if (page.getStatusCode() != 200 || !page.isDownloadSuccess()) {
            // FIX: the original mixed an unbound "{}" placeholder with string
            // concatenation; use proper parameterized logging instead.
            log.error("download failed, statusCode = {}, url = {}",
                    page.getStatusCode(), crawlerRecord.getHttpRequest().getUrl());
            if (page.getStatusCode() != 404) {
                // transient failure: schedule another attempt
                recordAgainRequest(crawlerRecord, list);
            }
            crawlerRecord.setNeedWashPage(false);
            return list;
        }
        String url = crawlerRecord.getHttpRequest().getUrl();
        // the three patterns are mutually exclusive, so else-if is sufficient
        if (url.matches(bitUrl)) {
            this.parseList(crawlerRecord, page, list);          // home page -> first list request
        } else if (url.matches(listUrl)) {
            this.parseArticle(crawlerRecord, page, list);       // list page -> article links + next page
        } else if (url.matches(articleRegulars)) {
            this.parseArticleItem(crawlerRecord, page, list);   // article detail page
        }
        return list;
    }

    /** Article detail pages yield no further links; intentionally a no-op. */
    private void parseArticleItem(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> list) {

    }

    /**
     * Parse one page of the list API: emit a detail-page request per article that
     * falls inside the configured time window, then queue the next list page.
     * Items are assumed newest-first, so the first out-of-range item stops pagination.
     */
    private void parseArticle(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> list) {
        List<String> all = page.getJson().jsonPath($_type + ".data.itemList").all();
        for (String str : all) {
            JSONObject jsonObject = JSONObject.parseObject(str);
            // createTime arrives as "yyyy-MM-dd'T'HH:mm:ss"; normalize the 'T' away
            String createTime = jsonObject.getString("createTime");
            long time;
            try {
                time = DateUtils.parseDate(createTime.replace("T", " "), "yyyy-MM-dd HH:mm:ss").getTime();
            } catch (ParseException e) {
                // FIX: the original called e.printStackTrace() and left time = 0,
                // which made isDateRange() fail and aborted the WHOLE page.
                // Skip only the malformed item instead.
                log.warn("unparseable createTime [{}], skipping item", createTime, e);
                continue;
            }
            if (!isDateRange(crawlerRecord, time)) {
                // outside the allowed window: stop paginating (list is newest-first)
                return;
            }
            String id = jsonObject.getString("id");
            CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(articleUrl + id)
                    .releaseTime(time)
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            list.add(crawlerRequestRecord);
        }
        // next page: pageIndex was stored in the biz tags by the previous request
        KVTag key = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("pageIndex");
        int pageIndex = Integer.parseInt(String.valueOf(key.getVal()));
        // stageTime is the current time, JSON-quoted because it is embedded verbatim
        // in both the signed string and the request body
        String stageTime = "\"" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "\"";
        list.add(buildListRequest(crawlerRecord, String.valueOf(pageIndex), stageTime, pageIndex + 1));
    }

    /** Entry page: issue the first list-API request (pageIndex = 1, empty stageTime). */
    private void parseList(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> list) {
        // the first page is signed with an empty (but still quoted) stageTime
        list.add(buildListRequest(crawlerRecord, "1", "\"\"", 2));
    }

    /**
     * Build a signed POST request for the list API (shared by first-page and
     * next-page scheduling, which previously duplicated this code).
     *
     * @param parent        record the new request is derived from (tags are copied)
     * @param pageIndex     page number, embedded verbatim in the JSON body
     * @param stageTime     stage time exactly as it appears in the body: already
     *                      quoted, e.g. {@code "\"2021-11-25 12:14:00\""} or {@code "\"\""}
     * @param nextPageIndex page index stored in the biz tags for the following round
     */
    private CrawlerRequestRecord buildListRequest(CrawlerRequestRecord parent, String pageIndex,
                                                  String stageTime, int nextPageIndex) {
        String signAndTime = getSign(pageIndex, stageTime);
        // getSign() returns "<sign>/<timestamp>". FIX: split on the LAST '/' so a
        // sign that itself contains '/' does not corrupt both header values.
        int sep = signAndTime.lastIndexOf('/');
        String xSign = signAndTime.substring(0, sep);
        String xTimestamp = signAndTime.substring(sep + 1);
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(parent)
                .httpUrl(listUrl)
                .releaseTime(System.currentTimeMillis())
                .httpHead("x-platform", x_platform)
                .httpHead("content-type", content_type)
                .httpHead("x-sign", xSign)
                .httpHead("x-timestamp", xTimestamp)
                .copyBizTags()
                .copyResultTags()
                .build();
        record.getHttpRequest().setMethod("post");
        // the body must byte-match the parameter string that was signed in getSign()
        String json = "{\"cid\":\"601\",\"param\":{\"pageIndex\":" + pageIndex
                + ",\"pageSize\":10,\"stageTime\":" + stageTime + "}}";
        record.getHttpRequest().setRequestBody(HttpRequestBody.json(json, "utf-8"));
        record.tagsCreator().bizTags().addCustomKV("pageIndex", nextPageIndex);
        return record;
    }

    /** Dispatch a downloaded page to the matching wash routine. */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> dataList = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (url.matches(listUrl)) {
            this.washList(crawlerRecord, page, dataList);
        } else if (url.matches(articleRegulars)) {
            this.washArticle(crawlerRecord, page, dataList);
        }
        return dataList;
    }

    /**
     * Turn every item on a list page into a {@link CrawlerData} record carrying the
     * article url and its raw createTime string.
     */
    private void washList(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> dataList) {
        List<String> all = page.getJson().jsonPath($_type + ".data.itemList").all();
        for (String str : all) {
            JSONObject jsonObject = JSONObject.parseObject(str);
            String createTime = jsonObject.getString("createTime");
            String url = articleUrl + jsonObject.getString("id");
            // NOTE(review): releaseTime is "now" rather than the parsed createTime;
            // the raw createTime is preserved in the "time" content KV.
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                    .releaseTime(System.currentTimeMillis())
                    .addContentKV("time", createTime)
                    .addContentKV("url", url)
                    .build();
            dataList.add(crawlerData);
        }
    }

    /** Article detail pages produce no washed data; intentionally a no-op. */
    private void washArticle(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> dataList) {

    }

    /**
     * Check whether a release time falls inside the record's configured date range.
     * Records without a date filter always pass. The range is either an explicit
     * [start, end] pair or a rolling "last N hours" window ending one minute ago.
     *
     * @param crawlerRequestRecord record carrying the filter configuration
     * @param releaseTimeToLong    release time in epoch millis (0 = unknown, never in range)
     * @return true when the time is inside the range, or no date filter applies
     */
    private boolean isDateRange(CrawlerRequestRecord crawlerRequestRecord, Long releaseTimeToLong) {
        boolean isRange = false;
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        if (filter == CrawlerEnum.CrawlerRecordFilter.keyOrDateRange || filter == CrawlerEnum.CrawlerRecordFilter.dateRange) {
            List<FilterInfo> filterInfos = crawlerRequestRecord.getFilterInfos();
            Long startTime = null;
            Long endTime = null;
            for (FilterInfo filterInfo : filterInfos) {
                if (filterInfo.getFilter() == CrawlerEnum.CrawlerRecordFilter.dateRange) {
                    long[] dateAllowRange = filterInfo.getDateAllowRange();
                    int hourFromNow = filterInfo.getHourFromNow();
                    if (dateAllowRange != null) {
                        // explicit [start, end] window
                        startTime = dateAllowRange[0];
                        endTime = dateAllowRange[1];
                    } else if (hourFromNow != 0) {
                        // rolling window: end one minute before now to avoid clock skew
                        endTime = System.currentTimeMillis() - 60000;
                        startTime = endTime - DateUtils.MILLIS_PER_HOUR * hourFromNow;
                    }
                }
            }
            if (startTime != null && releaseTimeToLong != 0 && startTime <= releaseTimeToLong && releaseTimeToLong <= endTime) {
                isRange = true;
            }
        } else {
            isRange = true;
        }
        return isRange;
    }

    /**
     * Re-queue a failed request, tracking the attempt count in a biz tag.
     * Gives up (logs and returns) once {@link #MAX_RETRY} attempts are reached.
     */
    private static void recordAgainRequest(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> parseList) {
        String url = crawlerRequestRecord.getHttpRequest().getUrl();
        int count = 1;
        if (crawlerRequestRecord.tagsCreator().bizTags().hasKVTag(RECORD_AGAIN_REQUEST)) {
            count = Integer.parseInt(crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(RECORD_AGAIN_REQUEST));
            if (count >= MAX_RETRY) {
                // FIX: the original logged here but fell through and re-queued
                // anyway, retrying forever; stop scheduling instead.
                log.error("url excessive number of repeated downloads this url = " + url);
                return;
            }
        }
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .releaseTime(System.currentTimeMillis())
                .httpUrl(url)
                .recordKey(crawlerRequestRecord.getRecordKey() + count) // distinct key per attempt
                .copyResultTags()
                .copyBizTags()
                .build();

        record.getHttpRequest().setCookies(crawlerRequestRecord.getHttpRequest().getCookies());
        record.getHttpRequest().setExtras(crawlerRequestRecord.getHttpRequest().getExtras());
        record.getHttpRequest().setHeaders(crawlerRequestRecord.getHttpRequest().getHeaders());
        record.setNeedParsedPage(crawlerRequestRecord.isNeedParsedPage());
        record.setNeedWashPage(crawlerRequestRecord.isNeedWashPage());
        // FIX: was addCustomKV(..., count++) — the post-increment stored the OLD
        // value, so the counter never advanced and the retry cap never triggered.
        record.tagsCreator().bizTags().addCustomKV(RECORD_AGAIN_REQUEST, count + 1);
        parseList.add(record);
    }

    /**
     * Ask the internal signing service for the {@code x-sign} value of a list request.
     * The signed string embeds the exact JSON body parameters plus a fixed salt and
     * the current timestamp.
     *
     * @param pageIndex page number as it appears in the request body
     * @param stageTime stage time exactly as it appears in the body (already quoted)
     * @return {@code "<sign>/<timestamp>"} — the timestamp MUST be sent as
     *         {@code x-timestamp} alongside the sign
     */
    public String getSign(String pageIndex, String stageTime) {
        String time = String.valueOf(System.currentTimeMillis());
        String s = "cid=601&param={\"pageIndex\":" + pageIndex + ",\"pageSize\":10,\"stageTime\":" + stageTime + "}DB2560A6EBC65F37A0484295CD4EDD25" + time;
        HttpClientDownloader downloader = new HttpClientDownloader();
        downloader.setClientGenerator(new HttpClientGenerator());
        downloader.setProxyProvider(new SiteConfigProxyProvider());
        HttpConfig httpConfig = HttpConfig.me("bitauto");
        HttpRequest httpRequest = new HttpRequest();
        httpRequest.setResponseCharset("UTF-8");
        httpRequest.setMethod("post");
        // NOTE(review): hard-coded internal service address — consider externalizing
        httpRequest.setUrl("http://192.168.1.210:8899/encrypt/sign");
        Map<String, Object> bodyMap = new HashMap<>();
        bodyMap.put("params", s);
        httpRequest.setRequestBody(HttpRequestBody.form(bodyMap, "UTF-8"));
        HttpPage download = downloader.download(httpRequest, httpConfig);
        return download.getRawText() + "/" + time;
    }

    /** This script only handles records tagged with {@link #site}. Null-safe: constant first. */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        return site.equals(crawlerRecord.tagsCreator().bizTags().site());
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    @Override
    public String domain() {
        return domain;
    }

}
