package com.chance.cc.crawler.development.scripts.womai;

import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.development.scripts.allfeild.AICCommonField;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

/**
 * Crawler script for womai.com (中粮我买网, the COFCO "Womai" online grocery):
 * expands seed requests into keyword searches, walks product-list pages,
 * and extracts product-detail (article) and user-review (comment) data.
 *
 * @author songding
 * @since 2021/10/19
 * @version 1.0
 **/
public class WomaiCrawlerScript extends CrawlerCommonScript {

    private static final Logger log = LoggerFactory.getLogger(WomaiCrawlerScript.class);

    private static final String domain = "womai";
    private static final String site = "search";

    private static final String womaiUrl = "https://sh.womai.com";
    /** Review-list endpoint; placeholders: product id, page number. */
    private static final String womaiComments = "https://sh.womai.com/green2012/product/remarklist.do?id=%s&mid=100&starlevel=0&page=%s&isremark=1";

    // URL patterns that route a downloaded record to its handler in parseLinks/washPage.
    private static final String womaiListRegulars = "https://sh.womai.com/ProductList.htm\\S*";
    private static final String womaiArticleRegulars = "https://www.womai.com/Product\\S*";
    private static final String womaiCommentRegulars = "https://sh.womai.com/toremark/\\S*";
    private static final String womaiCommentsRegulars = "https://sh.womai.com/green2012/product/remarklist.do\\S*";

    // Fragments of the keyword-search list URL; assembled only in buildListUrl().
    private static final String params1 = "ProductList.htm?isKeyCommendClick=1&&zhId=605&&searchlist=1&&mainColumnId=-1&&";
    private static final String params2 = "topKeywords=%s";
    private static final String params3 = "&&Cid=606&&ajaxprolist=1&&mid=100&&rypId=608&&";
    private static final String params4 = "Keywords=%s";
    private static final String params5 = "%2Ctitle%2Cmer_title%2Cmer_title_%2Cbrand%2CcloumnName%2Ckeyword%2Ckeywords%2CarticleRuleTitle%2CactiveName%2CProductFeatures&";
    private static final String params6 = "page=%s";

    /** Registers every URL shape this script is allowed to follow. */
    @Override
    public void initUrlRegulars() {
        addUrlRegular(womaiUrl);
        addUrlRegular(womaiListRegulars);
        addUrlRegular(womaiArticleRegulars);
        addUrlRegular(womaiCommentRegulars);
        addUrlRegular(womaiCommentsRegulars);
    }

    /**
     * Expands the seed request into one first-page keyword-search request per keyword
     * found in the supporting "keys" source records.
     *
     * @param requestRecord        the seed request; marked as not-to-be-parsed once
     *                             keyword requests have been generated
     * @param supportSourceRecords pre-downloaded supporting records; only those whose
     *                             URL contains "keys" are consumed
     * @return the generated keyword-search records (empty on error or no support records)
     */
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> listRecord = new ArrayList<>();
        if (supportSourceRecords == null) {
            return listRecord;
        }
        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
            String url = supportSourceRecord.getHttpRequest().getUrl();
            if (!url.contains("keys")) {
                continue;
            }
            HttpPage internalDownloadPage = supportSourceRecord.getInternalDownloadPage();
            Json json = internalDownloadPage.getJson();
            String msg = json.jsonPath($_type + ".msg").get();
            if (!"success".equals(msg)) {
                log.error("keyword page [{}] download error!", internalDownloadPage.getRequest().getUrl());
                return listRecord;
            }
            List<String> all = json.jsonPath($_type + ".content").all();
            for (String data : all) {
                JSONObject jsonObject = JSONObject.parseObject(data);
                // Keywords are GBK url-encoded because the site expects GBK query params.
                String keyword = this.toURl(jsonObject.getString("keywords"));
                int pageSize = 1;
                String listUrl = buildListUrl(keyword, pageSize);
                CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                        .itemPageRequest(requestRecord)
                        .httpUrl(listUrl)
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                record.tagsCreator().bizTags().addCustomKV("pageSize", pageSize);
                record.tagsCreator().bizTags().addCustomKV("q", keyword);
                listRecord.add(record);
                // The seed page itself carries no data once keyword requests exist.
                requestRecord.setNeedParsedPage(false);
            }
        }
        return listRecord;
    }

    /**
     * Routes a downloaded page to the matching link extractor by URL shape.
     * Failed downloads are re-queued (up to a retry limit) and NOT parsed.
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> parseLinks = new ArrayList<>();
        if (page.getStatusCode() != 200 || !page.isDownloadSuccess()) {
            log.error("download error or page != 200  code={}", page.getStatusCode());
            this.recordAgainDownload(crawlerRecord, page, parseLinks);
            // Fix: previously fell through and parsed the failed page; return retry only.
            return parseLinks;
        }
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (url.matches(womaiUrl)) {
            this.getListUrl(crawlerRecord, page, parseLinks);
        } else if (url.matches(womaiListRegulars)) {
            this.getListUrlTurn(crawlerRecord, page, parseLinks);
        } else if (url.matches(womaiArticleRegulars)) {
            this.getCommentUrl(crawlerRecord, page, parseLinks);
        } else if (url.matches(womaiCommentRegulars)) {
            this.getCommentTurnUrl(crawlerRecord, page, parseLinks);
        } else if (url.matches(womaiCommentsRegulars)) {
            this.getCommentsTurnUrl(crawlerRecord, page, parseLinks);
        }
        return parseLinks;
    }

    /**
     * From a review-list page, queues the next review page (POST) while reviews
     * are still present; stops silently when the list container is empty.
     */
    private void getCommentsTurnUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String id = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("id");
        KVTag commentPageSize = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("commentPageSize");
        Integer val = (Integer) commentPageSize.getVal();
        val = val + 1;
        List<Selectable> all = page.getHtml().xpath("//*[@id=\"publicPraiseListDiv\"]").nodes();
        if (all.isEmpty()) {
            return; // no more review pages
        }
        String url = String.format(womaiComments, id, val);
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(url)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        record.getHttpRequest().setMethod("post");
        record.tagsCreator().bizTags().addCustomKV("commentPageSize", val);
        parseLinks.add(record);
    }

    /**
     * From a "toremark" page, queues the first review-list page (POST) for the
     * product id carried in the record's biz tags.
     */
    private void getCommentTurnUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String id = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("id");
        int commentPageSize = 1;
        String url = String.format(womaiComments, id, commentPageSize);
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(url)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        record.getHttpRequest().setMethod("post");
        record.tagsCreator().bizTags().addCustomKV("commentPageSize", commentPageSize);
        parseLinks.add(record);
    }

    /**
     * From a product-detail page, extracts the "all reviews" link plus the product id
     * (last "_"-separated token of the preview span id) and queues the review page.
     */
    private void getCommentUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String url = page.getHtml().xpath("//*[@id=\"sortRemark\"]/div[1]/div[2]/a/@href").get();
        if (url == null) {
            return; // no review link on this product page
        }
        String id = page.getHtml().xpath("//div[@id=\"preview\"]/span/@id").get();
        if (id == null) {
            log.error("product id missing on [{}]", crawlerRecord.getHttpRequest().getUrl());
            return;
        }
        String[] parts = id.split("_");
        id = parts[parts.length - 1];
        url = womaiUrl + url;
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(url)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        record.tagsCreator().bizTags().addCustomKV("id", id);
        parseLinks.add(record);
    }

    /**
     * From a search-result page, queues one product-detail request per result item
     * and, while results are still present, the next search-result page.
     */
    private void getListUrlTurn(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String q = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("q");
        KVTag pageSize = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("pageSize");
        Integer val = (Integer) pageSize.getVal();
        val = val + 1;
        List<Selectable> all = page.getHtml().xpath("/html/body/div[3]/ul/li").nodes();
        if (all.isEmpty()) {
            return; // past the last result page: stop paging
        }
        for (Selectable node : all) {
            String url = node.xpath("./div/div[4]/p/a/@href").get();
            if (url == null) {
                continue;
            }
            // Mirrors the original substitution of every "100" with "0" in the product
            // path — presumably a store-id rewrite; TODO confirm against live URLs.
            url = "https://www.womai.com" + url.replace("100", "0");
            CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(url)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            parseLinks.add(record);
        }
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(buildListUrl(q, val))
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        record.tagsCreator().bizTags().addCustomKV("pageSize", val);
        record.tagsCreator().bizTags().addCustomKV("q", q);
        parseLinks.add(record);
    }

    /**
     * From the site home page, queues the first keyword-search page for the "q"
     * keyword carried in the record's biz tags.
     */
    private void getListUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String q = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("q");
        q = this.toURl(q);
        int pageSize = 1;
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(buildListUrl(q, pageSize))
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        record.tagsCreator().bizTags().addCustomKV("pageSize", pageSize);
        record.tagsCreator().bizTags().addCustomKV("q", q);
        parseLinks.add(record);
    }

    /**
     * Assembles the keyword-search list URL for the given (already GBK-encoded)
     * keyword and page number. Single source of truth for the params1..params6 layout.
     */
    private String buildListUrl(String keyword, int page) {
        return womaiUrl + "/" + params1 + String.format(params2, keyword) + params3
                + String.format(params4, keyword) + params5 + String.format(params6, page);
    }

    /**
     * GBK url-encodes a query keyword (the site expects GBK-encoded parameters).
     * On the (practically impossible) unsupported-encoding failure, logs and falls
     * back to the raw keyword instead of returning null into a URL.
     */
    private String toURl(String q) {
        try {
            return URLEncoder.encode(q, "gbk");
        } catch (UnsupportedEncodingException e) {
            log.error("gbk encode failed for [{}]", q, e);
            return q;
        }
    }

    /**
     * Dispatches a downloaded page to the article or comment washer according to
     * the record's requested data types and the page URL shape.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> dataList = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.article)
                && url.matches(womaiArticleRegulars) && !url.matches(womaiListRegulars)) {
            this.washArticle(crawlerRecord, page, dataList);
        }
        if (crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.comment)
                && url.matches(womaiCommentsRegulars)) {
            this.washComment(crawlerRecord, page, dataList);
        }
        return dataList;
    }

    /**
     * Extracts one comment record plus one interaction record (likes) per review on
     * a review-list page. Two layouts exist: "reply_evaluate" and "publicPraiseListDiv";
     * the latter nests fields one div deeper.
     */
    private void washComment(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> dataList) {
        String id = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("id");
        List<Selectable> nodes = page.getHtml().xpath("//ul[@class=\"reply_evaluate\"]/li").nodes();
        if (nodes.isEmpty()) {
            nodes = page.getHtml().xpath("//*[@id=\"publicPraiseListDiv\"]/li").nodes();
        }
        for (Selectable node : nodes) {
            String time = node.xpath("./div/div[2]/dl/dd[2]//text()").get();
            if (time == null) {
                time = node.xpath("./div[2]/dl/dd[2]//@value").get();
                if (time != null) {
                    // Drop the trailing seconds fragment — assumes a
                    // "yyyy-MM-dd HH:mm:ss.0"-style value; TODO confirm.
                    time = time.substring(0, time.length() - 5);
                }
            }
            long releaseTime = 0L;
            if (time != null) { // guard: parseDate(null) throws unchecked, not ParseException
                try {
                    releaseTime = DateUtils.parseDate(time, "yyyy-MM-dd HH:mm").getTime();
                } catch (ParseException e) {
                    log.error("time parse error: [{}]", time);
                }
            }
            if (!isDateRange(crawlerRecord, releaseTime)) {
                continue;
            }
            String authorName = node.xpath("./div[2]/dl/dt/text()").get();
            String content = node.xpath("./div[2]/div[2]/p/text()").get();
            List<Selectable> imgNodes = node.xpath("./div[2]/ul/li").nodes();
            // Fix: nodes() returns an empty list, never null, so the original
            // "== null" fallback branch was dead code. Fall back to the deeper
            // layout, but keep first-layout values when the fallback misses
            // (e.g. a first-layout review that simply has no images).
            if (imgNodes.isEmpty()) {
                imgNodes = node.xpath("./div/div[2]/div/ul/li").nodes();
                String altAuthor = node.xpath("./div/div[2]/dl/dt/text()").get();
                String altContent = node.xpath("./div/div[2]/p[1]/a/text()").get();
                if (altAuthor != null) {
                    authorName = altAuthor;
                }
                if (altContent != null) {
                    content = altContent;
                }
            }
            StringBuilder imgJoined = new StringBuilder();
            for (Selectable imgNode : imgNodes) {
                String imgUrl = imgNode.xpath("./img/@src").get();
                imgJoined.append("0x1").append(imgUrl); // "0x1" is the project image separator
            }
            String like = node.xpath("./div[2]/div[3]/a[1]").get();
            if (like != null && like.length() >= 2) {
                like = like.substring(0, like.length() - 2); // strip trailing markup chars
            } else {
                like = "0"; // fix: avoid NPE when the like counter is absent
            }
            // NOTE(review): random comment ids make records non-reproducible across
            // runs (dedup impossible); kept for compatibility — consider a content hash.
            int commentId = new Random().nextInt(1000000000);
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.comment, commentId))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, id))
                    .url(crawlerRecord.getHttpRequest().getUrl())
                    .releaseTime(System.currentTimeMillis())
                    .addContentKV(AICCommonField.Field_Content, content)
                    .addContentKV(AICCommonField.Field_Images, String.valueOf(imgJoined))
                    .addContentKV(AICCommonField.Field_Author, authorName)
                    .build();
            dataList.add(crawlerData);
            CrawlerData crawlerInteraction = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.interaction, commentId + 12))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.comment, commentId))
                    .url(crawlerRecord.getHttpRequest().getUrl())
                    .releaseTime(System.currentTimeMillis())
                    .addContentKV(AICCommonField.Field_I_Likes, like)
                    .addContentKV(AICCommonField.Field_I_Comments, "0")
                    .build();
            dataList.add(crawlerInteraction);
        }
    }

    /**
     * Extracts the product (article) record from a product-detail page: main image,
     * product id (last "_" token of the preview span id) and the image alt text as content.
     */
    private void washArticle(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> dataList) {
        String img = page.getHtml().xpath("//div[@id=\"preview\"]/span/img/@src").get();
        String id = page.getHtml().xpath("//div[@id=\"preview\"]/span/@id").get();
        if (id == null) { // fix: avoid NPE on pages missing the preview block
            log.error("product id missing on [{}]", crawlerRecord.getHttpRequest().getUrl());
            return;
        }
        String[] parts = id.split("_");
        id = parts[parts.length - 1];
        String content = page.getHtml().xpath("//div[@id=\"preview\"]/span/img/@alt").get();
        CrawlerData crawlerData = CrawlerData.builder()
                .data(crawlerRecord, page)
                .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, id))
                .url(crawlerRecord.getHttpRequest().getUrl())
                .releaseTime(System.currentTimeMillis())
                .addContentKV(AICCommonField.Field_Content, content)
                .addContentKV(AICCommonField.Field_Images, img)
                .build();
        dataList.add(crawlerData);
    }

    /** Accepts only records tagged with this script's site ("search"); null-safe. */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        return site.equals(crawlerRecord.tagsCreator().bizTags().site());
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // no post-processing required for this site
    }

    @Override
    public String domain() {
        return domain;
    }

    /**
     * Re-queues a failed download, tracking the attempt number in the "count" biz tag
     * and giving up after 10 attempts.
     */
    private void recordAgainDownload(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String count = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("count");
        int integerCount;
        if (count == null) {
            integerCount = 1;
        } else {
            integerCount = Integer.parseInt(count);
            if (integerCount >= 10) {
                log.error("The number of downloads exceeds the limit");
                return;
            }
            integerCount += 1;
        }
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(crawlerRecord.getHttpRequest().getUrl())
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                // distinct record key per attempt so the retry is not deduplicated
                .recordKey(crawlerRecord.getRecordKey() + integerCount)
                .copyResultTags()
                .build();
        record.tagsCreator().bizTags().addCustomKV("count", integerCount);
        parseLinks.add(record);
    }

    /**
     * Returns whether a release time falls inside the record's configured date range.
     * Records without a date filter always pass; a zero release time never does when
     * a range is configured.
     *
     * @param crawlerRequestRecord record carrying the filter configuration
     * @param releaseTimeToLong    epoch millis of the item, 0 when unparsable
     */
    private boolean isDateRange(CrawlerRequestRecord crawlerRequestRecord, Long releaseTimeToLong) {
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        if (filter != CrawlerEnum.CrawlerRecordFilter.keyOrDateRange && filter != CrawlerEnum.CrawlerRecordFilter.dateRange) {
            return true; // no date filtering requested
        }
        Long startTime = null;
        Long endTime = null;
        for (FilterInfo filterInfo : crawlerRequestRecord.getFilterInfos()) {
            if (filterInfo.getFilter() == CrawlerEnum.CrawlerRecordFilter.dateRange) {
                long[] dateAllowRange = filterInfo.getDateAllowRange();
                int hourFromNow = filterInfo.getHourFromNow();
                if (dateAllowRange != null) {
                    startTime = dateAllowRange[0];
                    endTime = dateAllowRange[1];
                } else if (hourFromNow != 0) {
                    endTime = System.currentTimeMillis() - 60000; // now minus one minute
                    startTime = endTime - DateUtils.MILLIS_PER_HOUR * hourFromNow;
                }
            }
        }
        return startTime != null && endTime != null && releaseTimeToLong != 0
                && startTime <= releaseTimeToLong && releaseTimeToLong <= endTime;
    }
}
