package com.chance.cc.crawler.development.scripts.jingdong;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.meta.core.bean.crawler.CrawlerDomainUrls;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.FileInputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.LinkedBlockingQueue;

import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Tag_Field_Domain_Result_Json;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Tag_Field_ECommerce_List_Content;

/**
 * @author bx
 * @date 2020/11/13 0013 15:41
 */
/**
 * Crawls product comments from JD's H5 (mobile) comment-list API.
 *
 * <p>Flow: {@link #prepareRequest} rewrites the seed record into a comment-list
 * request (page 1) and attaches a rotated user-agent; {@link #parseLinks}
 * validates the download, schedules retries on failure, and queues the next
 * comment page; {@link #washPage} turns the JSONP payload into one
 * {@link CrawlerData} per comment.
 */
public class JDH5CommentCrawlerScript extends CrawlerCommonScript {

    // FIX: loggers should be one-per-class constants (static final), not per-instance.
    private static final Logger log = LoggerFactory.getLogger(JDH5CommentCrawlerScript.class);

    /** Comment-list API template: first %s = sku, %d = page number, last %s = cache-busting token. */
    public static final String commentUrlFormat = "https://wq.jd.com/commodity/comment/getcommentlist?callback=skuJDEvalA&version=v2" +
            "&pagesize=10&sceneval=2&score=0&sku=%s&sorttype=6&page=%d&t=%s";

    /** Extras key carrying the page number of the current request. */
    public static final String commentCurrentPageNum = "currentPageNum";
    /** Extras key carrying the sku / product id of the current request. */
    public static final String commentProductId = "productId";

    /** Rotating user-agent pool shared by all instances; poll from head, re-append to tail. */
    private static final LinkedBlockingQueue<String> userAgents = new LinkedBlockingQueue<>();
    /** Lock guarding the lazy file-based initialization of {@link #userAgents}. */
    public static Object uaObject = new Object();

    /**
     * Rewrites the seed record into a JD comment-list request.
     *
     * <p>Seeds the user-agent pool (from support records or a fallback file),
     * builds the page-1 comment URL from the domain-result tag, rotates a
     * user-agent onto the request, and stashes page number + sku into the
     * request extras for later stages.
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        if (supportSourceRecords != null && !supportSourceRecords.isEmpty()) {
            for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
                if (supportSourceRecord.getHttpRequest().getUrl().contains("fake-useragent.herokuapp.com/browsers")) {
                    initUserAgents(supportSourceRecord); // seed user agents from the support source
                }
            }
        }

        // Lazily load a fallback UA list from disk (double-checked locking on uaObject).
        if (userAgents.isEmpty()) {
            synchronized (uaObject) {
                if (userAgents.isEmpty()) {
                    // FIX: try-with-resources — the original leaked the FileInputStream.
                    try (FileInputStream in = new FileInputStream("/data/chance_crawler_runner/data/ua/ua_jingdong.txt")) {
                        userAgents.addAll(IOUtils.readLines(in, StandardCharsets.UTF_8));
                    } catch (IOException e) {
                        // FIX: keep the stack trace instead of logging only the message.
                        log.error("failed to load fallback user-agent file", e);
                    }
                }
            }
        }

        HttpRequest httpRequest = requestRecord.getHttpRequest();

        if (requestRecord.tagsCreator().bizTags().hasKVTag(Tag_Field_Domain_Result_Json)) {
            KVTag domainResultJson = requestRecord.tagsCreator().bizTags().getCategoryTag().getKVTag(Tag_Field_Domain_Result_Json);
            CrawlerDomainUrls crawlerDomainUrls = JSON.parseObject(String.valueOf(domainResultJson.getVal()), CrawlerDomainUrls.class);
            String url = crawlerDomainUrls.getUrl();
            Json urlJson = new Json(url);
            String detailUrl = urlJson.jsonPath($_type + ".detailUrl").get();
            String itemId = urlJson.jsonPath($_type + ".itemId").get();
            // Start at page 1; Math.random() fills the "t" cache-busting parameter.
            String jdCommentUrl = String.format(commentUrlFormat, itemId, 1, Math.random());
            httpRequest.setUrl(jdCommentUrl);
            httpRequest.getHeaders().put("referer", detailUrl);
            httpRequest.getHeaders().put("Accept", "*/*");
            requestRecord.setDownload(true);
            requestRecord.setNeedParsedPage(true);
            requestRecord.setNeedWashPage(true);
            requestRecord.setSkipPipeline(false);
            requestRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_ECommerce_List_Content, url);
            requestRecord.tagsCreator().bizTags().getCategoryTag().getKvTags().remove(Tag_Field_Domain_Result_Json); // remove the consumed tag
        }

        // Rotate a user agent: poll from the head, re-append to the tail.
        String ua = userAgents.poll();
        if (ua != null) {
            // FIX: the original called userAgents.add(null) when the pool was empty,
            // which throws NPE from LinkedBlockingQueue (null-hostile).
            log.info("poll new ua: {}", ua);
            userAgents.add(ua);
            httpRequest.getHeaders().put("user-agent", ua);
        } else {
            log.warn("user-agent pool is empty; request will be sent without a rotated user-agent");
        }

        if (requestRecord.isDownload()) {
            // Record the page number and sku so parseLinks/washPage can build follow-ups.
            String requestUrl = requestRecord.getHttpRequest().getUrl();
            // FIX: guard the split — the original threw ArrayIndexOutOfBoundsException
            // for URLs without a query string.
            String[] urlParts = requestUrl.split("\\?", 2);
            String currentPageNumStr = null;
            String productId = null;
            if (urlParts.length > 1) {
                List<NameValuePair> parameters = URLEncodedUtils.parse(urlParts[1], StandardCharsets.UTF_8);
                for (NameValuePair parameter : parameters) {
                    if ("page".equals(parameter.getName())) {
                        currentPageNumStr = parameter.getValue();
                    }
                    if ("sku".equals(parameter.getName())) {
                        productId = parameter.getValue();
                    }
                }
            }
            requestRecord.getHttpRequest().addExtra(commentCurrentPageNum, currentPageNumStr);
            requestRecord.getHttpRequest().addExtra(commentProductId, productId);
        }

        return super.prepareRequest(requestRecord, supportSourceRecords);
    }

    /**
     * Validates the downloaded page and emits follow-up requests.
     *
     * <p>Failed or 401 downloads are re-queued via {@link #addCrawlerRecords}
     * and excluded from washing. On success, queues the next comment page while
     * pages (and comments) remain.
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {

        List<CrawlerRequestRecord> crawlerRequestRecords = new ArrayList<>();

        if (!page.isDownloadSuccess()) {
            log.error("jd h5 product comment proxy has error ,will retry");
            addCrawlerRecords(crawlerRequestRecords, crawlerRecord);
            crawlerRecord.setNeedWashPage(false); // download failed — nothing to wash
            return crawlerRequestRecords;
        }

        if (page.getStatusCode() == 401) {
            try {
                log.error("jd h5 product comment response status code is 401 ,will retry");
                Thread.sleep(1500); // brief back-off before the retry is queued
            } catch (InterruptedException e) {
                // FIX: restore the interrupt flag instead of swallowing it.
                Thread.currentThread().interrupt();
                log.error("interrupted while backing off after a 401 response", e);
            }
            addCrawlerRecords(crawlerRequestRecords, crawlerRecord);
            crawlerRecord.setNeedWashPage(false); // 401 — nothing to wash
            return crawlerRequestRecords;
        }

        try {
            // Response is JSONP: skuJDEvalA({...}); — strip the trailing ';' then the padding.
            String rawText = page.getRawText();
            if (rawText.endsWith(";")) {
                rawText = rawText.substring(0, rawText.lastIndexOf(";"));
            }
            Json json = new Json(rawText);
            json = json.removePadding("skuJDEvalA");
            String productId = crawlerRecord.getHttpRequest().getExtras().get(commentProductId).toString();
            int maxPage = Integer.parseInt(json.jsonPath($_type + ".result.maxPage").get());
            int currentPageNum = Integer.parseInt(crawlerRecord.getHttpRequest().getExtras().get(commentCurrentPageNum).toString());

            List<String> comments = json.jsonPath($_type + ".result.comments").all();
            if (currentPageNum < maxPage && comments != null && !comments.isEmpty()) {
                String nextPageUrl = String.format(commentUrlFormat, productId, currentPageNum + 1, Math.random());
                CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRecord)
                        .httpUrl(nextPageUrl)
                        .httpHeads(crawlerRecord.getHttpRequest().getHeaders())
                        .needWashed(true)
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .build();
                crawlerRequestRecords.add(crawlerRequestRecord);
            }

        } catch (Exception e) {
            // FIX: pass the throwable so the stack trace is not lost.
            log.error("jd h5 product comment download failed,error message {}", e.getMessage(), e);
            addCrawlerRecords(crawlerRequestRecords, crawlerRecord);
            crawlerRecord.setNeedWashPage(false); // parse failed — nothing to wash
        }
        return crawlerRequestRecords;
    }

    /**
     * Re-queues {@code crawlerRecord} as a retry, giving up after 500 attempts.
     * The retry count is tracked in the "download_retry_count" business tag.
     */
    private void addCrawlerRecords(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {

        int count = 1;
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        if (crawlerBusinessTags.hasKVTag("download_retry_count")) {
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag("download_retry_count").getVal();
            if (count >= 500) {
                log.error("jd h5 product comment download he number of retries exceeds the limit" +
                        ",request url {}", crawlerRecord.getHttpRequest().getUrl());
                return; // retry budget exhausted — drop the record
            }
        }

        count++;
        crawlerBusinessTags.addCustomKV("download_retry_count", count);

        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRecord)
                .httpRequest(crawlerRecord.getHttpRequest())
                .needWashed(true)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .build();
        crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    /**
     * Converts the JSONP comment payload into one {@link CrawlerData} per comment.
     * Comments whose creation time cannot be parsed are skipped (logged, not fatal).
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();

        try {
            // FIX(consistency): strip the JSONP trailing ';' exactly as parseLinks does
            // before removing the callback padding (no-op when it is already absent).
            String rawText = page.getRawText();
            if (rawText.endsWith(";")) {
                rawText = rawText.substring(0, rawText.lastIndexOf(";"));
            }
            Json json = new Json(rawText);
            json = json.removePadding("skuJDEvalA");
            List<String> comments = json.jsonPath($_type + ".result.comments").all();
            String productCommentSummary = json.jsonPath($_type + ".result.productCommentSummary").get();
            String productId = crawlerRecord.getHttpRequest().getExtras().get(commentProductId).toString();
            if (comments != null && !comments.isEmpty()) {
                for (String comment : comments) {
                    Json commentJson = new Json(comment);
                    String commentId = commentJson.jsonPath($_type + ".id").get();
                    String referenceId = commentJson.jsonPath($_type + ".referenceId").get();
                    String date = commentJson.jsonPath($_type + ".creationTime").get();
                    // Emit both the per-comment body and the product-level summary.
                    JSONObject contentObject = new JSONObject();
                    contentObject.put("productCommentSummary", JSON.parseObject(productCommentSummary));
                    contentObject.put("comment", JSON.parseObject(comment));
                    try {
                        long releaseTime = DateUtils.parseDate(date, "yyyy-MM-dd HH:mm:ss").getTime();
                        // NOTE(review): dataId uses .enumVal() while parentId joins the raw enum
                        // (its toString) — verify the two id schemes really are meant to differ.
                        CrawlerData crawlerData = CrawlerData.builder()
                                .data(crawlerRecord, page)
                                .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.comment.enumVal(), productId, referenceId, commentId))
                                .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, productId))
                                .releaseTime(releaseTime)
                                .url("https://item.jd.com/" + productId + ".html")
                                .content(JSON.toJSONString(contentObject))
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                                .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                                .copyBizTags()
                                .build();
                        crawlerData.tagsCreator().bizTags().addDomain(domain());
                        crawlerDataList.add(crawlerData);
                    } catch (ParseException e) {
                        log.info("jd h5 comment parse date {} is error!", date);
                        log.error(e.getMessage(), e);
                    }
                }
            }
        } catch (Exception e) {
            // FIX: pass the throwable so the stack trace is not lost.
            log.error("jd product detail wash exception: {},request url {}", e.getMessage(), page.getRequest().getUrl(), e);
        }

        return crawlerDataList;
    }

    /** Domain key used for tagging and data ids. */
    @Override
    public String domain() {
        return "jingdong";
    }

    /** Registers the URL pattern this script handles. */
    @Override
    public void initUrlRegulars() {
        addUrlRegular("https://wq.jd.com/commodity/comment/getcommentlist\\S*");
    }

    /** No pre-crawl filtering: every record is accepted. */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        return true;
    }

    /** No post-execution work required. */
    @Override
    public void afterExecute(CrawlerRecordContext context) {
    }

    /**
     * Seeds the user-agent pool from a fake-useragent "browsers" JSON payload,
     * whose "browsers" field maps browser family to a list of UA strings.
     */
    private void initUserAgents(CrawlerRequestRecord supportSourceRecord) {

        try {
            HttpPage httpPage = supportSourceRecord.getInternalDownloadPage();
            Json rawText = new Json(httpPage.getRawText());
            String browsers = rawText.jsonPath($_type + ".browsers").get();
            Map<String, Object> map = new Json(browsers).toObject(Map.class);
            for (Map.Entry<String, Object> entry : map.entrySet()) {
                @SuppressWarnings("unchecked") // payload values are JSON arrays of UA strings
                List<String> agents = (List<String>) entry.getValue();
                userAgents.addAll(agents);
            }
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }
}
