package com.chance.cc.crawler.development.scripts.sn;

import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.development.scripts.allfeild.AICCommonField;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;

/**
 * @ClassName suning
 * @Description Suning (苏宁) keyword-search crawler: expands keywords into paginated
 *              product-list requests, then follows each product into its comment pages.
 * @Author songding
 * @Date 2021/10/18 16:57
 * @Version 1.0
 * 苏宁  关键词查询 (Suning keyword search)
 **/
public class SNCrawlerScript extends CrawlerCommonScript {
    private static final Logger log = LoggerFactory.getLogger(SNCrawlerScript.class);

    /** Crawler domain identifier reported by {@link #domain()}. */
    public static final String domain = "suning";
    /** Site tag this script accepts in {@link #crawlerCheck}. */
    private static final String site = "search";

    /** Seed URL that kicks off keyword-based list crawling. */
    private static final String sn = "https://search.suning.com/";
    /** Search-list API; format args: keyword, page number. */
    private static final String snListUrl = "https://search.suning.com/emall/mobile/wap/clientSearch.jsonp?cityId=021&keyword=%s&channel=&cp=%s&ps=10&st=0&set=5&cf=&iv=-1&ci=&ct=-1&channelId=WAP&sp=&sg=&sc=&prune=&operate=0&isAnalysised=1&istongma=1&jlfstoreCode=&jlfOnly=0&jlftownCode=&saleMode=&v=99999999&yjhx=&sesab=ABB0AA";
    /** Comment API; format args: catentryId, salesCode, comment page number. Response is JSONP. */
    private static final String snCommentUrl = "https://review.suning.com/mobile/business/mobile/getClusterReviewListOrFoldReviewListVI/style--0000000%s-%s--total-%s-default-10-----normal--callback3.htm";

    /** Matches search-list API URLs. */
    private static final String snListRegulars = "https://search.suning.com/emall/mobile/wap/clientSearch.jsonp\\S*";
    /** Matches comment API URLs. */
    private static final String snCommentRegulars = "https://review.suning.com/mobile/business/mobile/\\S*";

    @Override
    public void initUrlRegulars() {
        addUrlRegular(sn);
        addUrlRegular(snListRegulars);
        addUrlRegular(snCommentRegulars);
    }

    /**
     * Expands keyword support records into first-page list requests.
     *
     * @param requestRecord        the originating request, used as the item-page parent
     * @param supportSourceRecords pre-downloaded keyword pages; entries whose URL does not
     *                             contain "keys" are skipped
     * @return one list request per keyword found (page number starts at 1); empty on failure
     */
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> listRecord = new ArrayList<>();
        if (supportSourceRecords == null) {
            return listRecord;
        }
        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
            String url = supportSourceRecord.getHttpRequest().getUrl();
            if (!url.contains("keys")) {
                continue;
            }
            HttpPage internalDownloadPage = supportSourceRecord.getInternalDownloadPage();
            Json json = internalDownloadPage.getJson();
            String msg = json.jsonPath($_type + ".msg").get();
            if (!"success".equals(msg)) {
                log.error("keyword page [{}] download error!", internalDownloadPage.getRequest().getUrl());
                // NOTE(review): aborts ALL remaining support records on the first failure,
                // matching the original behavior; 'continue' may be intended instead.
                return listRecord;
            }
            List<String> all = json.jsonPath($_type + ".content").all();
            for (String data : all) {
                JSONObject jsonObject = JSONObject.parseObject(data);
                String keyword = jsonObject.getString("keyword");
                int pageSize = 1; // list page numbering starts at 1
                String startUrl = String.format(snListUrl, keyword, pageSize);
                CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                        .itemPageRequest(requestRecord)
                        .httpUrl(startUrl)
                        .releaseTime(System.currentTimeMillis())
                        .copyResultTags()
                        .copyBizTags()
                        .build();
                record.tagsCreator().bizTags().addCustomKV("keywords", keyword);
                record.tagsCreator().bizTags().addCustomKV("pageSize", pageSize);
                listRecord.add(record);
            }
        }
        return listRecord;
    }

    /**
     * Dispatches a downloaded page to the matching link extractor, retrying failed downloads.
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> parseLinks = new ArrayList<>();
        if (page.getStatusCode() != 200 || !page.isDownloadSuccess()) {
            log.error("download error or page != 200  code={}", page.getStatusCode());
            this.recordAgainDownload(crawlerRecord, page, parseLinks);
            // FIX: return here — previously the failed page fell through and was parsed anyway.
            return parseLinks;
        }
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (url.matches(sn)) {
            this.getListUrl(crawlerRecord, page, parseLinks);
        }
        if (url.matches(snListRegulars)) {
            this.getTurnListUrl(crawlerRecord, page, parseLinks);
        }
        if (url.matches(snCommentRegulars)) {
            this.getTurnComment(crawlerRecord, page, parseLinks);
        }
        return parseLinks;
    }

    /**
     * Schedules the next comment page while the current page still contains reviews.
     * Pagination state (catentryId, salesCode, commentPage) is carried in biz tags.
     */
    private void getTurnComment(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        // The comment API responds with JSONP: "callback3(<json>)". Strip the 10-char
        // "callback3(" prefix and the trailing ')' so the body parses as plain JSON.
        String rawText = page.getRawText();
        rawText = rawText.substring(10, rawText.length() - 1);
        page.setRawText(rawText);
        List<String> all = page.getJson().jsonPath($_type + ".commodityReviews").all();
        if (all.isEmpty()) {
            return; // no reviews left — pagination finished
        }
        String catentryId = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("catentryId");
        String salesCode = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("salesCode");
        KVTag commentPage = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("commentPage");
        Integer val = (Integer) commentPage.getVal();
        val = val + 1;
        String urlComment = String.format(snCommentUrl, catentryId, salesCode, val);
        CrawlerRequestRecord urlCommentrecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(urlComment)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        urlCommentrecord.tagsCreator().bizTags().addCustomKV("commentPage", val);
        parseLinks.add(urlCommentrecord);
    }

    /**
     * From a list page: schedules the next list page and, for every product on the
     * current page, the first comment page.
     */
    private void getTurnListUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String keywords = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("keywords");
        KVTag pageSize = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("pageSize");
        Integer val = (Integer) pageSize.getVal();
        val = val + 1;
        // FIX: parse the goods list once and reuse it (it was previously extracted twice).
        List<String> goods;
        try {
            goods = page.getJson().jsonPath($_type + ".goods").all();
        } catch (Exception e) {
            log.error("no goods node in list page [{}]", crawlerRecord.getHttpRequest().getUrl(), e);
            return;
        }
        if (goods.isEmpty()) {
            return; // past the last page
        }
        String url = String.format(snListUrl, keywords, val);
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(url)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        record.tagsCreator().bizTags().addCustomKV("keywords", keywords);
        record.tagsCreator().bizTags().addCustomKV("pageSize", val);
        parseLinks.add(record);

        for (String str : goods) {
            JSONObject jsonObject = JSONObject.parseObject(str);
            String catentryId = jsonObject.getString("catentryId");
            String salesCode = jsonObject.getString("salesCode");
            int commentPage = 1; // comment pagination starts at 1
            String urlComment = String.format(snCommentUrl, catentryId, salesCode, commentPage);
            CrawlerRequestRecord urlCommentrecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(urlComment)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            urlCommentrecord.tagsCreator().bizTags().addCustomKV("catentryId", catentryId);
            urlCommentrecord.tagsCreator().bizTags().addCustomKV("salesCode", salesCode);
            urlCommentrecord.tagsCreator().bizTags().addCustomKV("commentPage", commentPage);
            parseLinks.add(urlCommentrecord);
        }
    }

    /**
     * From the seed URL: builds the first list-page request for the keyword carried
     * in the "q" biz tag.
     */
    private void getListUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String keywords = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("q");
        int pageSize = 1; // list page numbering starts at 1
        String url = String.format(snListUrl, keywords, pageSize);

        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(url)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        record.tagsCreator().bizTags().addCustomKV("keywords", keywords);
        record.tagsCreator().bizTags().addCustomKV("pageSize", pageSize);
        parseLinks.add(record);
    }

    /**
     * Routes a page to the article or comment washer based on its URL and the
     * requested data types.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> dataList = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.article)) {
            if (url.matches(snListRegulars)) {
                this.washArticle(crawlerRecord, page, dataList);
            }
        }
        if (crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.comment)) {
            if (url.matches(snCommentRegulars)) {
                this.washComment(crawlerRecord, page, dataList);
            }
        }
        return dataList;
    }

    /**
     * Extracts comment records from a (already JSONP-stripped) comment page.
     * Stops at the first comment outside the configured date range — the API returns
     * comments in order, so later ones are out of range too.
     */
    private void washComment(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> dataList) {
        String returnMsg = page.getJson().jsonPath($_type + ".returnMsg").get();
        // FIX: constant-first equals — returnMsg is null when the JSON path is absent.
        if (!"成功取得评价列表".equals(returnMsg)) {
            return;
        }
        List<String> all = page.getJson().jsonPath($_type + ".commodityReviews").all();
        for (String str : all) {
            JSONObject jsonObject = JSONObject.parseObject(str);
            String commodityReviewId = jsonObject.getString("commodityReviewId");
            String content = jsonObject.getString("content");
            String publishTimeStr = jsonObject.getString("publishTimeStr");
            long time = 0L;
            try {
                time = DateUtils.parseDate(publishTimeStr, "yyyy-MM-dd HH:mm:ss").getTime();
            } catch (ParseException e) {
                // FIX: log with context instead of printStackTrace(); time stays 0 and the
                // date-range check below rejects the record.
                log.error("unparseable publish time [{}]", publishTimeStr, e);
            }
            if (!isDateRange(crawlerRecord, time)) {
                return;
            }
            String nickName = jsonObject.getString("nickName");
            String levelId = jsonObject.getString("levelId");
            String catentryId = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("catentryId");
            // FIXME(review): the field mapping below looks swapped (Field_Author <- comment
            // text, Field_Images <- nickname). Kept as-is because downstream consumers may
            // depend on it — confirm against AICCommonField usage before changing.
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.comment, commodityReviewId))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, catentryId))
                    .url(crawlerRecord.getHttpRequest().getUrl())
                    .releaseTime(System.currentTimeMillis())
                    .addContentKV(AICCommonField.Field_Author, content)
                    .addContentKV(AICCommonField.Field_Images, nickName)
                    .addContentKV(AICCommonField.Field_Author_Id, levelId)
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                    .build();
            dataList.add(crawlerData);
        }
    }

    /**
     * Extracts product (article) records from a list page. When the "monthly" biz tag
     * is set, an additional URL-only record is emitted per product.
     */
    private void washArticle(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> dataList) {
        List<String> all = page.getJson().jsonPath($_type + ".goods").all();
        for (String str : all) {
            JSONObject jsonObject = JSONObject.parseObject(str);
            String catentdesc = jsonObject.getString("catentdesc");
            String price = jsonObject.getString("price");
            String dynamicImg = jsonObject.getString("dynamicImg");
            String catentryId = jsonObject.getString("catentryId");
            String salesCode = jsonObject.getString("salesCode");
            // FIXME(review): catentryId appears twice; the first path segment is likely
            // meant to be salesCode (which is otherwise unused here) — TODO confirm.
            String url = "https://m.suning.com/product/" + catentryId + "/" + catentryId + ".html";
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, catentryId))
                    .url(url)
                    .releaseTime(System.currentTimeMillis())
                    .addContentKV(AICCommonField.Field_Content, catentdesc)
                    .addContentKV(AICCommonField.Field_Images, dynamicImg)
                    .addContentKV(AICCommonField.Field_Produce_Price, price)
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                    .build();
            dataList.add(crawlerData);
            if (crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("monthly") != null) {
                CrawlerData crawlerUrl = CrawlerData.builder()
                        .data(crawlerRecord, page)
                        .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, catentryId))
                        .url(url)
                        .releaseTime(System.currentTimeMillis())
                        .addContentKV(AICCommonField.Field_Urls, url)
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                        .build();
                dataList.add(crawlerUrl);
            }
        }
    }

    /**
     * Checks whether a release time falls inside the record's configured date range.
     * Records without a date-range filter always pass.
     *
     * @param crawlerRequestRecord record carrying the filter configuration
     * @param releaseTimeToLong    epoch millis of the item; 0 (unparsed) never passes
     * @return true when in range or no date filter applies
     */
    private boolean isDateRange(CrawlerRequestRecord crawlerRequestRecord, Long releaseTimeToLong) {
        boolean isRange = false;
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        if (filter == CrawlerEnum.CrawlerRecordFilter.keyOrDateRange || filter == CrawlerEnum.CrawlerRecordFilter.dateRange) {
            List<FilterInfo> filterInfos = crawlerRequestRecord.getFilterInfos();
            Long startTime = null;
            Long endTime = null;
            // FIX: guard against a null filter list (previously NPE'd).
            if (filterInfos != null) {
                for (FilterInfo filterInfo : filterInfos) {
                    if (filterInfo.getFilter() == CrawlerEnum.CrawlerRecordFilter.dateRange) {
                        long[] dateAllowRange = filterInfo.getDateAllowRange();
                        int hourFromNow = filterInfo.getHourFromNow();
                        if (dateAllowRange != null) {
                            startTime = dateAllowRange[0];
                            endTime = dateAllowRange[1];
                        } else if (hourFromNow != 0) {
                            endTime = System.currentTimeMillis() - 60000; // now minus one minute
                            startTime = endTime - DateUtils.MILLIS_PER_HOUR * hourFromNow;
                        }
                    }
                }
            }
            if (startTime != null && releaseTimeToLong != 0 && startTime <= releaseTimeToLong && releaseTimeToLong <= endTime) {
                isRange = true;
            }
        } else {
            isRange = true;
        }
        return isRange;
    }

    /**
     * Re-enqueues a failed download, tracking the attempt number in the "count" biz tag
     * and giving up after 10 attempts.
     */
    private void recordAgainDownload(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String count = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("count");
        Integer integerCount = 0;
        if (count == null) {
            integerCount = 1;
        } else {
            integerCount = Integer.valueOf(count);
            if (integerCount >= 10) {
                log.error("The number of downloads exceeds the limit");
                return;
            }
            integerCount += 1;
        }
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(crawlerRecord.getHttpRequest().getUrl())
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                // distinct key per attempt so the retry is not deduplicated away
                .recordKey(crawlerRecord.getRecordKey() + integerCount)
                .copyResultTags()
                .build();
        record.tagsCreator().bizTags().addCustomKV("count", integerCount);
        parseLinks.add(record);
    }

    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        // FIX: constant-first equals — previously NPE'd when the record had no site tag.
        return site.equals(crawlerRecord.tagsCreator().bizTags().site());
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // Intentionally empty: no post-execution cleanup is needed for this script.
    }

    @Override
    public String domain() {
        return domain;
    }
}
