package com.chance.cc.crawler.development.scripts.vip;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.development.scripts.allfeild.AICCommonField;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;

/**
 * @ClassName weipinhui
 * @Description Crawler script for vip.com (唯品会): searches products by keyword,
 *              batches product ids into detail-list requests, and washes product
 *              articles plus paginated user comments into CrawlerData records.
 * @Author songding
 * @Date 2021/10/9 11:26
 * @Version 1.0
 * 唯品会
 **/
public class VipCrawlerScript extends CrawlerCommonScript {

    private static final Logger log = LoggerFactory.getLogger(VipCrawlerScript.class);

    /** Crawler domain identifier used by the framework to route records to this script. */
    public static final String domain = "vip";
    private static final String site = "article";
    private static final String RECORD_AGAIN_REQUEST = "record_again_request";

    /** Give up re-enqueuing a failed download after this many attempts. */
    private static final int MAX_RETRY_COUNT = 10;
    /** Number of product ids packed into one detail-list ("module/list/v2") request. */
    private static final int PID_BATCH_SIZE = 50;
    /** "referer" header value attached to every generated request. */
    private static final String REFERER = "https://category.vip.com/";

    // URL patterns used to dispatch parseLinks/washPage by request URL.
    private static final String articleIdUrlRegulars = "https://mapi.vip.com/vips-mobile/rest/shopping/pc/search/product/rank\\S*";
    private static final String articleUrlRegulars = "https://mapi.vip.com/vips-mobile/rest/shopping/pc/product/module/list/v2\\S*";
    private static final String commentUrlRegulars= "https://mapi.vip.com/vips-mobile/rest/content/reputation/queryBySpuId_for_pc\\S*";

    // Format template: expects (spuId, brandId, page, timestamp).
    private static final String commentUrl = "https://mapi.vip.com/vips-mobile/rest/content/reputation/queryBySpuId_for_pc?callback=getCommentDataCb&app_name=shop_pc&app_version=4.0&warehouse=VIP_SH&fdc_area_id=103101101&client=pc&mobile_platform=1&province_id=103101&api_key=70f71280d5d547b2a7bb370a529aeea1&user_id=&mars_cid=1633744443336_db263722db1fd847f7a3b593cf0b41f4&wap_consumer=a&spuId=%s&brandId=%s&page=%s&pageSize=10&timestamp=%s&";
    private static final String vipUrl = "https://category.vip.com/";
    // Format template: expects (keyword, pageOffset, timestamp).
    private static final String articleIdUrl = "https://mapi.vip.com/vips-mobile/rest/shopping/pc/search/product/rank?" +
            "callback=getMerchandiseIds&app_name=shop_pc&app_version=4.0&warehouse=VIP_SH" +
            "&fdc_area_id=103101101&client=pc" +
            "&mobile_platform=1&province_id=103101&api_key=70f71280d5d547b2a7bb370a529aeea1" +
            "&user_id=&mars_cid=1633744443336_db263722db1fd847f7a3b593cf0b41f4&wap_consumer=a" +
            "&standby_id=nature&keyword=%s&lv3CatIds=&lv2CatIds=&lv1CatIds=&brandStoreSns=&" +
            "props=&priceMin=&priceMax=&vipService=&sort=0&pageOffset=%s&channelId=1&gPlatform=PC&batchSize=120&_=%s";

    @Override
    public void initUrlRegulars() {
        addUrlRegular(vipUrl);
        addUrlRegular(articleIdUrlRegulars);
        addUrlRegular(articleUrlRegulars);
        addUrlRegular(commentUrlRegulars);
    }

    /**
     * Builds one request record with the shared builder chain used throughout this script:
     * referer header, biz/result tags copied from the parent record.
     */
    private static CrawlerRequestRecord buildRequest(CrawlerRequestRecord parent, String url) {
        return CrawlerRequestRecord.builder()
                .itemPageRequest(parent)
                .httpUrl(url)
                .releaseTime(System.currentTimeMillis())
                .httpHead("referer", REFERER)
                .copyBizTags()
                .copyResultTags()
                .build();
    }

    /**
     * Strips a JSONP wrapper ("callbackName(" prefix plus the trailing ")") from the raw
     * page text in place so the body can be parsed as plain JSON.
     *
     * @param page         page whose raw text is rewritten
     * @param prefixLength length of the callback prefix including the opening parenthesis
     */
    private static void stripJsonpWrapper(HttpPage page, int prefixLength) {
        String rawText = page.getRawText();
        // Guard against short/empty bodies: the original unconditional substring would
        // throw StringIndexOutOfBoundsException on a truncated response.
        if (rawText == null || rawText.length() <= prefixLength) {
            return;
        }
        page.setRawText(rawText.substring(prefixLength, rawText.length() - 1));
    }

    /**
     * Builds the initial keyword-search requests from the scheduler's keyword records.
     *
     * @param requestRecord        parent record the generated requests are attached to
     * @param supportSourceRecords keyword source records; only those whose URL contains
     *                             "keys" are consumed
     * @return one search request per keyword (empty list on missing input or download error)
     */
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> listRecord = new ArrayList<>();
        if (supportSourceRecords == null) {
            return listRecord;
        }
        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
            String url = supportSourceRecord.getHttpRequest().getUrl();
            if (!url.contains("keys")) {
                continue;
            }
            HttpPage internalDownloadPage = supportSourceRecord.getInternalDownloadPage();
            Json json = internalDownloadPage.getJson();
            String msg = json.jsonPath($_type + ".msg").get();
            if (!"success".equals(msg)) {
                log.error("keyword page [{}] download error!", internalDownloadPage.getRequest().getUrl());
                return listRecord;
            }
            List<String> all = json.jsonPath($_type + ".content").all();
            for (String data : all) {
                JSONObject jsonObject = JSONObject.parseObject(data);
                String keyword = jsonObject.getString("keyword");
                String urlRecord = String.format(articleIdUrl, keyword, 0, System.currentTimeMillis());
                CrawlerRequestRecord record = buildRequest(requestRecord, urlRecord);
                // pageOffset of the NEXT page; keywords is needed later to build pagination URLs.
                record.tagsCreator().bizTags().addCustomKV("pageOffset", 20);
                record.tagsCreator().bizTags().addCustomKV("keywords", keyword);
                listRecord.add(record);
            }
        }
        return listRecord;
    }

    /**
     * Dispatches a downloaded page to the matching link extractor by URL pattern.
     * Failed downloads (non-200, non-404) are re-enqueued via {@link #recordAgainRequest}.
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> parseLinks = new ArrayList<>();
        if (page.getStatusCode() != 200 || !page.isDownloadSuccess()) {
            log.error("page download failed, statusCode = {}, url = {}",
                    page.getStatusCode(), crawlerRecord.getHttpRequest().getUrl());
            // 404 is treated as permanent and falls through; everything else is retried.
            if (page.getStatusCode() != 404) {
                recordAgainRequest(crawlerRecord, parseLinks);
                crawlerRecord.setNeedWashPage(false);
                return parseLinks;
            }
        }
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (url.matches(vipUrl)) {
            this.getArticleId(crawlerRecord, page, parseLinks);
        }
        if (url.matches(articleIdUrlRegulars)) {
            this.getArticleList(crawlerRecord, page, parseLinks);
        }
        if (url.matches(articleUrlRegulars)) {
            this.getCommentUrl(crawlerRecord, page, parseLinks);
        }
        if (url.matches(commentUrlRegulars)) {
            this.getTurnPageCommentUrl(crawlerRecord, page, parseLinks);
        }
        return parseLinks;
    }

    /*
     * 得到下一页评论
     * Emits the request for the next comment page while the current page still has data.
     */
    private void getTurnPageCommentUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        stripJsonpWrapper(page, 17); // "getCommentDataCb(" prefix
        List<String> all = page.getJson().jsonPath($_type + ".data").all();
        if (all.isEmpty()) {
            log.info("No comment data");
            crawlerRecord.setNeedWashPage(false);
            return;
        }
        String brandId = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("brandId");
        String spuId = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("spuId");
        KVTag pageTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("page");
        if (brandId == null || spuId == null || pageTag == null) {
            log.error("missing comment paging tags, url = {}", crawlerRecord.getHttpRequest().getUrl());
            return;
        }
        Integer nextPage = (Integer) pageTag.getVal() + 1;
        String url = String.format(commentUrl, spuId, brandId, nextPage, System.currentTimeMillis());
        url = url + "keyWordNlp=%E6%9C%80%E6%96%B0-%E6%8C%89%E6%97%B6%E9%97%B4%E6%8E%92%E5%BA%8F&_=" + System.currentTimeMillis();
        CrawlerRequestRecord record = buildRequest(crawlerRecord, url);
        record.tagsCreator().bizTags().addCustomKV("page", nextPage);
        parseLinks.add(record);
    }

    /**
     * Extracts brandId/spuId pairs from a product-module page and emits the first
     * comment-page request for each product.
     */
    private void getCommentUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        stripJsonpWrapper(page, 24); // "getMerchandiseDroplets1(" prefix
        String s = page.getJson().jsonPath($_type + ".data").get();
        JSONObject jsonObject = JSONObject.parseObject(s);
        JSONArray products = jsonObject.getJSONArray("products");
        if (products == null) {
            return;
        }
        for (Object obj : products) {
            // FIX: elements of a fastjson JSONArray are JSONObjects, so the previous
            // (String) cast always threw ClassCastException.
            JSONObject product = JSONObject.parseObject(String.valueOf(obj));
            String brandId = product.getString("brandId");
            String spuId = product.getString("spuId");
            String url = String.format(commentUrl, spuId, brandId, 1, System.currentTimeMillis());
            url = url + "keyWordNlp=%E6%9C%80%E6%96%B0-%E6%8C%89%E6%97%B6%E9%97%B4%E6%8E%92%E5%BA%8F&_=" + System.currentTimeMillis();
            CrawlerRequestRecord record = buildRequest(crawlerRecord, url);
            record.tagsCreator().bizTags().addCustomKV("page", 1);
            record.tagsCreator().bizTags().addCustomKV("brandId", brandId);
            record.tagsCreator().bizTags().addCustomKV("spuId", spuId);
            parseLinks.add(record);
        }
    }

    /*
     * 得到文章链接拼接的字符 / 下一页
     * Batches the product ids of a search-result page into detail-list requests
     * (PID_BATCH_SIZE ids per request) and emits the next search page.
     */
    private void getArticleList(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        stripJsonpWrapper(page, 18); // "getMerchandiseIds(" prefix
        JSONArray products = null;
        try {
            String data = page.getJson().jsonPath($_type + ".data").get();
            products = JSONObject.parseObject(data).getJSONArray("products");
        } catch (Exception e) {
            log.error("No next page");
        }
        if (products == null) {
            return;
        }
        String url = "https://mapi.vip.com/vips-mobile/rest/shopping/pc/product/module/list/v2?callback=getMerchandiseDroplets1&app_name=shop_pc&app_version=4.0&warehouse=VIP_SH&fdc_area_id=103101101&client=pc&mobile_platform=1&province_id=103101&api_key=70f71280d5d547b2a7bb370a529aeea1&user_id=&mars_cid=1633744443336_db263722db1fd847f7a3b593cf0b41f4&wap_consumer=a&productIds=";
        String params = "&scene=search&standby_id=nature&extParams=%7B%22stdSizeVids%22%3A%22%22%2C%22preheatTipsVer%22%3A%223%22%2C%22couponVer%22%3A%22v2%22%2C%22exclusivePrice%22%3A%221%22%2C%22iconSpec%22%3A%222x%22%2C%22ic2label%22%3A1%7D&context=&_=" + System.currentTimeMillis();

        // FIX: the old i/m/a/j counter scheme silently dropped one pid per batch reset
        // and returned early on full pages without ever emitting the pagination record.
        // Collect every pid, then emit one request per PID_BATCH_SIZE chunk.
        List<String> pidList = new ArrayList<>();
        for (Object obj : products) {
            JSONObject product = JSONObject.parseObject(String.valueOf(obj));
            String pid = product.getString("pid");
            if (StringUtils.isNotBlank(pid)) {
                pidList.add(pid);
            }
        }
        for (int from = 0; from < pidList.size(); from += PID_BATCH_SIZE) {
            int to = Math.min(from + PID_BATCH_SIZE, pidList.size());
            StringBuilder pids = new StringBuilder();
            for (String pid : pidList.subList(from, to)) {
                pids.append(pid).append("%2C"); // URL-encoded comma separator
            }
            parseLinks.add(buildRequest(crawlerRecord, url + pids + params));
        }

        // Emit the next search-result page.
        KVTag offsetTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("pageOffset");
        // FIX: the first format argument of articleIdUrl is the KEYWORD, not the offset;
        // the keyword travels in the "keywords" (or legacy "key") biz tag.
        String keyword = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("keywords");
        if (keyword == null) {
            keyword = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("key");
        }
        if (offsetTag == null || keyword == null) {
            log.error("missing pageOffset/keyword tags, pagination stopped, url = {}",
                    crawlerRecord.getHttpRequest().getUrl());
            return;
        }
        Integer pageOffset = (Integer) offsetTag.getVal();
        String pageUrl = String.format(articleIdUrl, keyword, pageOffset, System.currentTimeMillis());
        CrawlerRequestRecord record = buildRequest(crawlerRecord, pageUrl);
        record.tagsCreator().bizTags().addCustomKV("pageOffset", pageOffset + 20);
        parseLinks.add(record);
    }

    // 得到每个文章的id — builds the first search request from the "key" biz tag.
    private void getArticleId(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String key = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("key");
        if (key == null) {
            return;
        }
        String url = String.format(articleIdUrl, key, 0, System.currentTimeMillis());
        CrawlerRequestRecord record = buildRequest(crawlerRecord, url);
        record.tagsCreator().bizTags().addCustomKV("pageOffset", 20);
        // Keep the keyword available for later pagination in getArticleList.
        record.tagsCreator().bizTags().addCustomKV("keywords", key);
        parseLinks.add(record);
    }

    /** Routes a downloaded page to the article or comment washer, keyed by URL and result tags. */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> dataList = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.article)
                && url.matches(articleUrlRegulars)) {
            this.washArticleUrl(crawlerRecord, page, dataList);
        }
        if (crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.comment)
                && url.matches(commentUrlRegulars)) {
            this.washComment(crawlerRecord, page, dataList);
        }
        return dataList;
    }

    /**
     * Washes a product detail-list page into article CrawlerData records
     * (title, image, sale/market price, discount flag).
     */
    private void washArticleUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> dataList) {
        List<String> all = page.getJson().jsonPath($_type + ".data.products").all();
        for (String str : all) {
            JSONObject jsonObject = JSONObject.parseObject(str);
            String title = jsonObject.getString("title");           // 标题
            String brandId = jsonObject.getString("brandId");       // id
            String squareImage = jsonObject.getString("squareImage");// 图片
            String productId = jsonObject.getString("productId");
            JSONObject price = jsonObject.getJSONObject("price");
            String marketPrice = price == null ? null : price.getString("marketPrice"); // 打折前
            String salePrice = price == null ? null : price.getString("salePrice");     // 打折后
            // FIX: guard the price comparison — missing or non-numeric prices used to
            // throw and abort the whole wash.
            boolean commodityDiscount = false;
            try {
                if (marketPrice != null && salePrice != null) {
                    commodityDiscount = Double.parseDouble(marketPrice) > Double.parseDouble(salePrice);
                }
            } catch (NumberFormatException e) {
                log.warn("unparseable price, marketPrice = {}, salePrice = {}", marketPrice, salePrice);
            }
            String url = "https://detail.vip.com/detail-" + brandId + "-" + productId + ".html";
            CrawlerData crawlerArticle = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, brandId))
                    .url(url)
                    .releaseTime(System.currentTimeMillis())
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                    .addContentKV(AICCommonField.Field_Title, title)
                    .addContentKV(AICCommonField.Field_Images, squareImage)
                    .addContentKV(AICCommonField.Field_Present_Price, salePrice)
                    .addContentKV(AICCommonField.Field_Original_Price, marketPrice)
                    .addContentKV(AICCommonField.Field_Commodity_Discount, String.valueOf(commodityDiscount))
                    .build();
            dataList.add(crawlerArticle);
            // Records tagged "monthly" additionally emit a URL-only record.
            if (crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("monthly") != null) {
                CrawlerData crawlerUrl = CrawlerData.builder()
                        .data(crawlerRecord, page)
                        .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, brandId))
                        .url(url)
                        .releaseTime(System.currentTimeMillis())
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                        .addContentKV(AICCommonField.Field_Urls, url)
                        .build();
                dataList.add(crawlerUrl);
            }
        }
    }

    /**
     * Washes a comment page into comment + interaction CrawlerData records, skipping
     * comments outside the configured date range.
     */
    private void washComment(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> dataList) {
        stripJsonpWrapper(page, 17); // "getCommentDataCb(" prefix
        List<String> all = page.getJson().jsonPath($_type + ".data").all();
        String brandId = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("brandId");
        for (String str : all) {
            JSONObject jsonObject = JSONObject.parseObject(str);
            JSONObject reputation = jsonObject.getJSONObject("reputation");
            JSONObject reputationProduct = jsonObject.getJSONObject("reputationProduct");
            JSONObject reputationUser = jsonObject.getJSONObject("reputationUser");
            String title = reputationProduct.getString("titleNoBrand");
            String midStr = reputationProduct.getString("midStr");
            String brandName = reputationProduct.getString("brandName");
            String content = reputation.getString("content");
            String postTime = reputation.getString("postTime");
            // FIX: guard against missing postTime — Long.valueOf(null) used to throw.
            if (StringUtils.isBlank(postTime) || !isDateRange(crawlerRecord, Long.valueOf(postTime))) {
                continue;
            }
            String authorName = reputationUser.getString("authorName");
            String vips = reputationUser.getString("vips");
            CrawlerData crawlerArticle = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.comment, midStr))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, brandId))
                    .url(crawlerRecord.getHttpRequest().getUrl())
                    .releaseTime(Long.parseLong(postTime))
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                    .addContentKV(AICCommonField.Field_Title, title)
                    .addContentKV(AICCommonField.Field_Content, content)
                    .addContentKV(AICCommonField.Field_Author, authorName)
                    .addContentKV("brand", brandName)
                    .build();
            dataList.add(crawlerArticle);

            CrawlerData crawlerInteraction = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.interaction, midStr))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.comment, midStr))
                    .url(crawlerRecord.getHttpRequest().getUrl())
                    .releaseTime(Long.parseLong(postTime))
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                    .addContentKV(AICCommonField.Field_I_Likes, vips)
                    .build();
            dataList.add(crawlerInteraction);
        }
    }

    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        // Constant-first equals avoids an NPE when the record carries no site tag.
        return site.equals(crawlerRecord.tagsCreator().bizTags().site());
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // No post-processing required for this domain.
    }

    @Override
    public String domain() {
        return domain;
    }

    /**
     * Re-enqueues a failed request, tracking the attempt count in a biz tag and giving
     * up after MAX_RETRY_COUNT attempts.
     */
    private static void recordAgainRequest(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> parseList) {
        int count;
        String url = crawlerRequestRecord.getHttpRequest().getUrl();
        if (crawlerRequestRecord.tagsCreator().bizTags().hasKVTag(RECORD_AGAIN_REQUEST)) {
            count = Integer.parseInt(crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(RECORD_AGAIN_REQUEST));
            if (count >= MAX_RETRY_COUNT) {
                // FIX: previously only logged and re-enqueued anyway; now the retry loop stops.
                log.error("url excessive number of repeated downloads this url = {}", url);
                return;
            }
        } else {
            count = 1;
        }
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .releaseTime(System.currentTimeMillis())
                .httpUrl(url)
                .recordKey(crawlerRequestRecord.getRecordKey() + count) // distinct key per attempt
                .copyResultTags()
                .copyBizTags()
                .build();

        record.getHttpRequest().setCookies(crawlerRequestRecord.getHttpRequest().getCookies());
        record.getHttpRequest().setExtras(crawlerRequestRecord.getHttpRequest().getExtras());
        record.getHttpRequest().setHeaders(crawlerRequestRecord.getHttpRequest().getHeaders());
        record.setNeedParsedPage(crawlerRequestRecord.isNeedParsedPage());
        record.setNeedWashPage(crawlerRequestRecord.isNeedWashPage());
        // FIX: was addCustomKV(..., count++), which stores the OLD value — the counter
        // never advanced and the MAX_RETRY_COUNT cap was unreachable (infinite retries).
        record.tagsCreator().bizTags().addCustomKV(RECORD_AGAIN_REQUEST, count + 1);
        parseList.add(record);
    }

    /**
     * 判断是否在时间范围内
     * Returns true when the release time falls inside the record's configured date
     * range (explicit range or "last N hours"); records without a date filter always pass.
     *
     * @param crawlerRequestRecord record carrying the filter configuration
     * @param releaseTimeToLong    release time in epoch millis
     * @return true if the time is acceptable
     */
    private boolean isDateRange(CrawlerRequestRecord crawlerRequestRecord, Long releaseTimeToLong) {
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        if (filter != CrawlerEnum.CrawlerRecordFilter.keyOrDateRange && filter != CrawlerEnum.CrawlerRecordFilter.dateRange) {
            return true;
        }
        Long startTime = null;
        Long endTime = null;
        for (FilterInfo filterInfo : crawlerRequestRecord.getFilterInfos()) {
            if (filterInfo.getFilter() != CrawlerEnum.CrawlerRecordFilter.dateRange) {
                continue;
            }
            long[] dateAllowRange = filterInfo.getDateAllowRange();
            int hourFromNow = filterInfo.getHourFromNow();
            if (dateAllowRange != null) {
                startTime = dateAllowRange[0];
                endTime = dateAllowRange[1];
            } else if (hourFromNow != 0) {
                endTime = System.currentTimeMillis() - 60000; // 系统时间减去一分钟
                startTime = endTime - DateUtils.MILLIS_PER_HOUR * hourFromNow;
            }
        }
        return startTime != null && endTime != null && releaseTimeToLong != null
                && releaseTimeToLong != 0
                && startTime <= releaseTimeToLong && releaseTimeToLong <= endTime;
    }
}
