package com.chance.cc.crawler.development.scripts.jingdong;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.meta.core.bean.crawler.CrawlerDomainUrls;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.LinkedBlockingQueue;

import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Tag_Field_Domain_Result_Json;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Tag_Field_ECommerce_List_Content;

/**
 * Crawler script for JingDong (JD) mobile product detail pages.
 *
 * <p>Responsibilities:
 * <ul>
 *   <li>builds detail-page requests ({@code item.m.jd.com/product/<id>.html})
 *       from the domain-result business tag;</li>
 *   <li>rotates a shared pool of user-agent strings, seeded either from a
 *       fake-useragent support request or, once, from a local file;</li>
 *   <li>re-queues failed downloads with a bounded retry counter;</li>
 *   <li>extracts the embedded {@code window._itemInfo} / {@code window._itemOnly}
 *       JSON blobs from the page's inline scripts and merges them into a single
 *       article-type result.</li>
 * </ul>
 *
 * @author bx
 * @date 2020/11/13 0013 15:41
 */
public class JDDetailCrawlerScript extends CrawlerCommonScript {

    private static final Logger log = LoggerFactory.getLogger(JDDetailCrawlerScript.class);

    /** URL template for the mobile product detail page; %s is the product id. */
    public static final String detailUrlFormat = "https://item.m.jd.com/product/%s.html";
    /** Key under which the product id is stashed in the request extras. */
    public static final String detailProductId = "productId";

    /** Fallback UA used when the rotating pool is unexpectedly empty. */
    public static final String defaultUserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36";

    /** Markers of the inline-script JSON assignments on the detail page. */
    public static final String itemOnly = "window._itemOnly =  (";
    public static final String itemInfo = "window._itemInfo = (";

    /** Shared, rotating pool of user-agent strings (thread-safe queue). */
    private static final LinkedBlockingQueue<String> userAgents = new LinkedBlockingQueue<>();
    /** Lock guarding the one-time file-based initialization of {@link #userAgents}; final so it cannot be reassigned mid-synchronization. */
    public static final Object uaObject = new Object();

    /** NOTE(review): Taobao comment-feed URL — appears unused by this JD script; confirm before removing. */
    public static final String commentUrl = "https://rate.taobao.com/feedRateList.htm?auctionNumId=%s&userNumId=%s&currentPageNum=%s&pageSize=20&folded=0&orderType=feedbackdate&callback=jsonp_tbcrate_reviews_list";

    /**
     * Prepares the HTTP request for one product detail page: seeds the
     * user-agent pool, rewrites the request URL from the domain-result tag when
     * present, and attaches a rotated user-agent header.
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {

        HttpRequest httpRequest = requestRecord.getHttpRequest();

        // Seed user agents from the optional fake-useragent support request.
        if (supportSourceRecords != null && !supportSourceRecords.isEmpty()) {
            for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
                if (supportSourceRecord.getHttpRequest().getUrl().contains("fake-useragent.herokuapp.com/browsers")) {
                    initUserAgents(supportSourceRecord); // initialize user agents
                }
            }
        }

        // Fallback: lazily load the UA pool from a local file (double-checked locking).
        if (userAgents.isEmpty()) {
            synchronized (uaObject) {
                if (userAgents.isEmpty()) {
                    // try-with-resources: IOUtils.readLines does not close the stream,
                    // so the original leaked a file handle here.
                    try (FileInputStream in = new FileInputStream("/data/chance_crawler_runner/data/ua/ua_jingdong.txt")) {
                        userAgents.addAll(IOUtils.readLines(in, "utf-8"));
                    } catch (IOException e) {
                        log.error("failed to load user agents from file", e); // keep the stack trace
                    }
                }
            }
        }

        if (requestRecord.tagsCreator().bizTags().hasKVTag(Tag_Field_Domain_Result_Json)) {

            // Rebuild the detail-page URL from the stored domain result.
            KVTag domainResultJson = requestRecord.tagsCreator().bizTags().getCategoryTag().getKVTag(Tag_Field_Domain_Result_Json);
            CrawlerDomainUrls crawlerDomainUrls = JSON.parseObject(String.valueOf(domainResultJson.getVal()), CrawlerDomainUrls.class);
            String url = crawlerDomainUrls.getUrl();
            Json urlJson = new Json(url);
            String itemId = urlJson.jsonPath($_type + ".itemId").get();
            String jdDetailUrl = String.format(detailUrlFormat, itemId);
            httpRequest.setUrl(jdDetailUrl);
            httpRequest.getHeaders().put("referer", "https://shop.m.jd.com/");
            httpRequest.getHeaders().put("Accept", "*/*");
            httpRequest.addExtra(detailProductId, itemId);
            requestRecord.setDownload(true);
            requestRecord.setNeedParsedPage(true);
            requestRecord.setNeedWashPage(true);
            requestRecord.setSkipPipeline(false);
            requestRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_ECommerce_List_Content, url);
            requestRecord.tagsCreator().bizTags().getCategoryTag().getKvTags().remove(Tag_Field_Domain_Result_Json); // remove the consumed tag
        }

        // Rotate a user agent: take from the head, put it back at the tail.
        String ua = userAgents.poll();
        if (ua == null) {
            // Pool may still be empty if both the support request and the file failed;
            // the original NPE'd on userAgents.add(null) here.
            ua = defaultUserAgent;
            log.warn("user agent pool is empty, falling back to default UA");
        } else {
            userAgents.add(ua);
        }
        log.info("poll new ua: {}", ua);
        httpRequest.getHeaders().put("user-agent", ua);

        return super.prepareRequest(requestRecord, supportSourceRecords);
    }

    /**
     * Validates the downloaded page; on failure schedules a bounded retry and
     * suppresses washing of the broken page.
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {

        // If the download is abnormal, re-queue for the next round and skip washing.
        List<CrawlerRequestRecord> crawlerRequestRecords = new ArrayList<>();

        if (!page.isDownloadSuccess()) {
            log.error("jingdong product detail proxy has error ,will retry");
            addCrawlerRecords(crawlerRequestRecords, crawlerRecord);
            crawlerRecord.setNeedWashPage(false); // download failed, nothing to wash
            return crawlerRequestRecords;
        }

        try {
            String rawText = page.getRawText();

            // A redirect to the JD home page means the product no longer exists.
            if (StringUtils.isNotBlank(rawText)
                    && rawText.contains("<title>多快好省，购物上京东！</title>")) {
                log.warn("jingdong product {} is not exist! ", page.getRequest().getUrl());
                crawlerRecord.setNeedWashPage(false);
                return crawlerRequestRecords;
                // TODO automatically purge products that no longer exist; for now just log
            }

            if (StringUtils.isBlank(rawText) || !rawText.contains("window._itemInfo")) {
                Thread.sleep(1500); // brief back-off before re-queueing
                throw new Exception("jingdong product detail request url " + page.getRequest().getUrl() + " download failed, will retry");
            }
        } catch (InterruptedException ie) {
            Thread.currentThread().interrupt(); // restore the interrupt flag
            log.error("jingdong product detail download interrupted", ie);
            addCrawlerRecords(crawlerRequestRecords, crawlerRecord);
            crawlerRecord.setNeedWashPage(false);
        } catch (Exception e) {
            log.error("jingdong product detail download failed,error message {}", e.getMessage());
            addCrawlerRecords(crawlerRequestRecords, crawlerRecord);
            crawlerRecord.setNeedWashPage(false); // download failed, nothing to wash
        }
        return crawlerRequestRecords;
    }

    /**
     * Re-queues {@code crawlerRecord} for another download attempt, tracking the
     * attempt count in the "download_retry_count" business tag. Records that
     * have reached 500 attempts are dropped (with an error log).
     */
    private void addCrawlerRecords(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {

        int count = 1;
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        if (crawlerBusinessTags.hasKVTag("download_retry_count")) {
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag("download_retry_count").getVal();
            if (count >= 500) {
                // Give up: the record is intentionally dropped after 500 attempts.
                log.error("jingdong product detail download the number of retries exceeds the limit"
                        + ",request url {}", crawlerRecord.getHttpRequest().getUrl());
                return;
            }
        }

        count++;
        crawlerBusinessTags.addCustomKV("download_retry_count", count);

        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpRequest(crawlerRecord.getHttpRequest())
                .needWashed(true)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .build();
        crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    /**
     * Extracts the {@code window._itemInfo} and {@code window._itemOnly} JSON
     * payloads from the page's inline scripts, merges them ({@code _itemOnly}
     * keys win) and emits a single article-type {@link CrawlerData}.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();

        try {
            String productId = crawlerRecord.getHttpRequest().getExtras().get(detailProductId).toString();

            List<Selectable> nodes = page.getHtml().xpath("//script").nodes();
            String itemInfoData = null;
            String itemOnlyData = null;
            for (Selectable node : nodes) {
                String script = StringUtils.joinWith("", node.all().toArray());
                script = StringEscapeUtils.unescapeHtml(script).trim();

                // Search for the closing ");" from the marker onwards; the original
                // used indexOf(");") from position 0, which produces a wrong (or
                // negative) end index when ");" occurs before the marker.
                if (script.contains(itemInfo)) {
                    int start = script.indexOf(itemInfo) + itemInfo.length();
                    itemInfoData = script.substring(start, script.indexOf(");", start));
                }

                if (script.contains(itemOnly)) {
                    int start = script.indexOf(itemOnly) + itemOnly.length();
                    itemOnlyData = script.substring(start, script.indexOf(");", start));
                }
            }

            if (StringUtils.isNotBlank(itemInfoData) && StringUtils.isNotBlank(itemOnlyData)) {
                // Merge the two blobs; keys from _itemOnly overwrite _itemInfo.
                JSONObject itemObject = JSON.parseObject(itemInfoData);
                JSONObject onlyObject = JSON.parseObject(itemOnlyData);
                JSONObject jdProductDetail = new JSONObject();
                jdProductDetail.putAll(itemObject);
                jdProductDetail.putAll(onlyObject);
                itemInfoData = JSON.toJSONString(jdProductDetail);
            }

            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article.enumVal(), productId))
                    .releaseTime(System.currentTimeMillis())
                    .content(itemInfoData)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                    .copyBizTags()
                    .build();
            crawlerData.tagsCreator().bizTags().addDomain(domain());
            crawlerDataList.add(crawlerData);

        } catch (Exception e) {
            // Log with the full stack trace; e.getMessage() alone loses it.
            log.error("jingdong product detail wash exception, request url {}", page.getRequest(), e);
        }

        return crawlerDataList;
    }

    @Override
    public String domain() {
        return "jingdong";
    }

    /** Registers the URL pattern this script handles. */
    @Override
    public void initUrlRegulars() {
        addUrlRegular("https://item.m.jd.com/product/\\S*");
    }

    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        // No additional pre-crawl validation needed for detail pages.
        return true;
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // No post-processing required.
    }

    /**
     * Parses the fake-useragent support response (shape assumed to be
     * {@code {"browsers": {name: [ua, ...]}}} — confirm against the service)
     * and adds every listed UA string to the shared pool.
     */
    @SuppressWarnings("unchecked") // fastjson Map values are untyped; each value is expected to be a List<String>
    private void initUserAgents(CrawlerRequestRecord supportSourceRecord) {

        try {
            HttpPage httpPage = supportSourceRecord.getInternalDownloadPage();
            Json rawText = new Json(httpPage.getRawText());
            String browsers = rawText.jsonPath($_type + ".browsers").get();
            Map<String, Object> map = new Json(browsers).toObject(Map.class);
            for (Map.Entry<String, Object> entry : map.entrySet()) {
                List<String> agents = (List<String>) entry.getValue();
                userAgents.addAll(agents);
            }
        } catch (Exception e) {
            log.error("failed to initialize user agents from support request", e);
        }
    }
}
