package com.chance.cc.crawler.development.scripts.autohome.article;

import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.CategoryTag;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.article;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.Tag_Field_Addr_Info;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.Tag_Field_Brand;

/**
 * Crawler script for dealer promotion/news articles on autohome.com.cn (site tag "dealer").
 *
 * <p>Crawl graph: index page ({@link #indexRegex}) -&gt; per-brand/city dealer list pages
 * ({@link #dealerListUrlRegex}) -&gt; per-dealer sale/news list pages ({@link #saleListUrlRegex},
 * {@link #newsListUrlRegex}) -&gt; article detail pages ({@link #articleUrlRegex}); the article
 * body is fetched separately through the internal {@code GetNewsContent} AJAX endpoint
 * ({@link #contentUrlRegex}) and merged back in {@link #washPage}.</p>
 *
 * @author lt
 * @version 1.0
 * @date 2021-02-18 16:30:07
 * @email okprog@sina.com
 */
public class AutoHomeDealerArticleCrawlerScript extends CrawlerCommonScript {

    private static final Logger logger = LoggerFactory.getLogger(AutoHomeDealerArticleCrawlerScript.class);

    /** Entry page listing all brands and cities. */
    public static final String indexRegex = "https?://dealer\\.autohome\\.com\\.cn/all/";
    /** Dealer-list page for one city/brand combination. */
    public static final String dealerListUrlRegex = "https?://dealer\\.autohome\\.com\\.cn/\\w*/0/\\d*/0/0/\\d*/0/0/0\\.html";
    /** Per-dealer promotion ("sale") list page. */
    public static final String saleListUrlRegex = "https?://dealer\\.autohome\\.com\\.cn/\\d*/newslist_c0_s0_p\\d*\\.html";
    /** Per-dealer news ("information") list page. */
    public static final String newsListUrlRegex = "https?://dealer\\.autohome\\.com\\.cn/\\d*/informationList_c0_s0_p\\d*\\.html";
    /** Article detail page. */
    public static final String articleUrlRegex = "https?://dealer\\.autohome\\.com\\.cn/\\d*/news_\\d*\\.html";
    /** Internal AJAX endpoint that returns the article body as JSON. */
    public static final String contentUrlRegex = "https://dealer\\.autohome\\.com\\.cn/News/GetNewsContent\\S*";

    // URL templates; placeholders: (cityId, brandId, page), (shopId, page), (shopId, page),
    // (dealerId, newsId) respectively.
    public static final String dealerListUrlFormat = "https://dealer.autohome.com.cn/%s/0/%s/0/0/%s/0/0/0.html";
    public static final String saleListUrlFormat = "https://dealer.autohome.com.cn/%s/newslist_c0_s0_p%s.html";
    public static final String newsListUrlFormat = "https://dealer.autohome.com.cn/%s/informationList_c0_s0_p%s.html";
    public static final String newsContentUrlFormat = "https://dealer.autohome.com.cn/News/GetNewsContent?dealerId=%s&newsid=%s";
    private static final String scriptSite = "dealer";

    /**
     * Validates the downloaded page and dispatches it to the parser matching its URL.
     *
     * <p>Non-200/failed/empty downloads other than 404 are re-queued for retry (the record
     * is pushed back with washing disabled and the duplicate filter removed). A 404 is
     * treated as permanently gone and produces no links.</p>
     *
     * @param crawlerRequestRecord the record whose request produced {@code httpPage}
     * @param httpPage             the downloaded page
     * @return new records to schedule; empty on 404; {@code null} for unrecognized URLs
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        int statusCode = httpPage.getStatusCode();
        String lastRequestUrl = lastRequest.getUrl();
        if (StringUtils.isBlank(httpPage.getRawText()) || !httpPage.isDownloadSuccess() || statusCode != 200){
            if (statusCode == 404){
                // Page is permanently gone; do not retry.
                logger.error("页面不存在：{}", statusCode);
                return parsedLinks;
            }
            // Transient failure: push the same record back so it is downloaded again,
            // skipping washing and the duplicate filter for the retry.
            parsedLinks.add(crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
            logger.error("页面下载状态：{}，状态码：{}，内容为空：{}，实行回推", httpPage.isDownloadSuccess(), statusCode, StringUtils.isBlank(httpPage.getRawText()));
            return parsedLinks;
        }
        if (lastRequestUrl.matches(indexRegex)){
            return parseIndexLink(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (lastRequestUrl.matches(dealerListUrlRegex)){
            return parseDealerListLinks(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (lastRequestUrl.matches(saleListUrlRegex) || lastRequestUrl.matches(newsListUrlRegex)){
            return parseListUrlLinks(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (lastRequestUrl.matches(articleUrlRegex)){
            return parseNewsContent(crawlerRequestRecord, httpPage, parsedLinks);
        }
        // Unrecognized URL: nothing to extract.
        return null;
    }

    /**
     * Extracts the numeric article id from a {@code .../news_<id>.html} URL.
     * Shared by {@link #parseNewsContent} and {@link #washPage} so both derive the key
     * the same way.
     */
    private static String extractArticleKey(String articleUrl) {
        return articleUrl.split("_")[1].split("\\.")[0];
    }

    /**
     * For an article detail page, schedules an internal-download request for the
     * {@code GetNewsContent} AJAX endpoint that carries the article body.
     * Relies on the {@code shopId} extra placed on the record by {@link #parseListUrlLinks}.
     */
    private List<CrawlerRequestRecord> parseNewsContent(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        String requestUrl = httpRequest.getUrl();
        Map<String, Object> extras = httpRequest.getExtras();
        String shopId = (String) extras.get("shopId");
        String articleKey = extractArticleKey(requestUrl);
        String newsContentUrl = String.format(newsContentUrlFormat, shopId, articleKey);
        CrawlerRequestRecord contentRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .httpUrl(newsContentUrl)
                .recordKey(newsContentUrl)
                .releaseTime(System.currentTimeMillis())
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        parsedLinks.add(contentRecord);

        return parsedLinks;
    }

    /**
     * Parses a per-dealer sale/news list page: schedules the next list page (when a
     * "next" link exists) and one article record per list item, carrying {@code shopId}
     * and any promotion-period text as extras.
     */
    private List<CrawlerRequestRecord> parseListUrlLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        String requestUrl = httpRequest.getUrl();
        Map<String, Object> listExtras = httpRequest.getExtras();
        Html html = httpPage.getHtml();
        List<Selectable> itemNodes = html.xpath("//div[@class=\"dealeron-cont\"]/dl").nodes();
        String pageNext = html.xpath("//a[@class=\"page-next\"]/@href").get();
        if (itemNodes == null || itemNodes.isEmpty()) {
            return parsedLinks;
        }
        String shopId = (String) listExtras.get("shopId");
        if (StringUtils.isNotBlank(pageNext)) {
            // FIX: extras may hold curPage as String or Integer depending on which parser
            // stored it; parse via String.valueOf instead of a blind (String) cast, which
            // threw ClassCastException from the second turn-page onwards.
            int curPage = Integer.parseInt(String.valueOf(listExtras.get("curPage"))) + 1;
            String nextPageUrl = "";
            if (requestUrl.matches(saleListUrlRegex)) {
                nextPageUrl = String.format(saleListUrlFormat, shopId, curPage);
            }
            if (requestUrl.matches(newsListUrlRegex)) {
                nextPageUrl = String.format(newsListUrlFormat, shopId, curPage);
            }
            CrawlerRequestRecord nextPageRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(nextPageUrl)
                    .recordKey(nextPageUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .build();
            Map<String, Object> nextExtras = copyExtras(listExtras);
            // Always stored as String so every reader can parse it uniformly.
            nextExtras.put("curPage", String.valueOf(curPage));
            nextPageRecord.getHttpRequest().setExtras(nextExtras);
            parsedLinks.add(nextPageRecord);
        }
        for (Selectable itemNode : itemNodes) {
            String itemUrl = itemNode.xpath("./dd/p[@class=\"name font-yh\"]/a/@href").get();
            if (StringUtils.isBlank(itemUrl)) {
                // No link for this entry -> nothing to crawl.
                continue;
            }
            if (!itemUrl.startsWith("https:")) {
                // Relative href: make it absolute and drop any query string.
                itemUrl = "https://dealer.autohome.com.cn" + itemUrl.split("\\?")[0];
            }
            String pubTime = itemNode.xpath("./dd/p[@class=\"date\"]/span[@class=\"date-time\"]/text()").get();
            List<String> promotions = itemNode.xpath("./dd/p[@class=\"date\"]/span[2]//text()").all();

            try {
                // Label text is "<prefix>：yyyy-MM-dd"; keep the part after the full-width
                // colon. FIX: done inside the try so a missing/malformed date only skips
                // this one item instead of aborting the whole list parse.
                String pubDate = pubTime.split("：")[1];
                CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(itemUrl)
                        .recordKey(itemUrl)
                        .releaseTime(DateUtils.parseDate(pubDate, "yyyy-MM-dd").getTime())
                        .copyBizTags()
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                        .needParsed(true)
                        .needWashed(true)
                        .build();
                itemRecord.getHttpRequest().addExtra("shopId", shopId);
                if (promotions != null && !promotions.isEmpty()) {
                    StringBuilder sbPromotions = new StringBuilder();
                    for (String promotion : promotions) {
                        sbPromotions.append(promotion);
                    }
                    itemRecord.getHttpRequest().addExtra("promotions", sbPromotions.toString());
                }
                parsedLinks.add(itemRecord);

            } catch (Exception e) {
                // FIX: keep the context and the cause instead of a bare message.
                logger.error("parse date error, itemUrl={}, pubTime={}", itemUrl, pubTime, e);
            }
        }
        return parsedLinks;
    }

    /**
     * Parses a dealer-list page: schedules the next dealer-list page (when paginated)
     * and, for every shop on the page, the first page of both its sale list and its
     * news list, each seeded with {@code shopId} and {@code curPage} extras.
     */
    private List<CrawlerRequestRecord> parseDealerListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> lastExtras = httpRequest.getExtras();
        Html html = httpPage.getHtml();
        String paging = html.xpath("//div[@id=\"pagination\"]/a/@href").get();
        List<Selectable> allShopNodes = html.xpath("//ul[@class=\"list-box\"]/li").nodes();
        if (allShopNodes == null || allShopNodes.isEmpty()) {
            return parsedLinks;
        }
        if (StringUtils.isNotBlank(paging)) {
            String cityId = (String) lastExtras.get("cityId");
            String brandId = (String) lastExtras.get("brandId");
            // FIX: the original did `String curPage; curPage += 1;`, which is string
            // concatenation ("1" -> "11") and broke pagination. Parse numerically,
            // tolerating both String and Integer storage.
            int curPage = Integer.parseInt(String.valueOf(lastExtras.get("curPage"))) + 1;
            String dealerListUrl = String.format(dealerListUrlFormat, cityId, brandId, curPage);
            CrawlerRequestRecord dealerListRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(dealerListUrl)
                    .recordKey(dealerListUrl)
                    .releaseTime(System.currentTimeMillis())
                    .notFilterRecord()
                    .copyBizTags()
                    .build();
            Map<String, Object> nextExtras = copyExtras(lastExtras);
            nextExtras.put("curPage", String.valueOf(curPage));
            dealerListRecord.getHttpRequest().setExtras(nextExtras);
            parsedLinks.add(dealerListRecord);
        }
        for (Selectable shopNode : allShopNodes) {
            String shopId = shopNode.xpath("./@id").get();
            int curPage = 1;
            String saleListUrl = String.format(saleListUrlFormat, shopId, curPage);
            String newsListUrl = String.format(newsListUrlFormat, shopId, curPage);
            CrawlerRequestRecord saleRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(saleListUrl)
                    .recordKey(saleListUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .notFilterRecord()
                    .build();
            saleRecord.getHttpRequest().addExtra("curPage", String.valueOf(curPage));
            saleRecord.getHttpRequest().addExtra("shopId", shopId);
            parsedLinks.add(saleRecord);
            CrawlerRequestRecord newsRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(newsListUrl)
                    .recordKey(newsListUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .notFilterRecord()
                    .build();
            newsRecord.getHttpRequest().addExtra("curPage", String.valueOf(curPage));
            // FIX: was mistakenly added to saleRecord a second time, leaving newsRecord
            // without shopId (so its article content URL was built with a null dealerId).
            newsRecord.getHttpRequest().addExtra("shopId", shopId);
            parsedLinks.add(newsRecord);
        }
        return parsedLinks;
    }

    /**
     * Parses the index page: for every (brand, city) pair, schedules page 1 of the
     * corresponding dealer-list URL, tagging it with brand and address info and
     * seeding {@code cityId}/{@code brandId}/{@code curPage} extras.
     */
    private List<CrawlerRequestRecord> parseIndexLink(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        Html html = httpPage.getHtml();
        List<Selectable> allBrandNodes = html.xpath("//ul[@class=\"filter-box\"]/li[@class=\"row row-hide data-brand-item\"]/div/a[@class=\"item\"]").nodes();
        List<Selectable> allCityNodes = html.xpath("//div[@class=\"city-dealer-box\"]/ul/li/a").nodes();
        if (allBrandNodes == null || allCityNodes == null) {
            return parsedLinks;
        }
        for (Selectable brandNode : allBrandNodes) {
            String brand = brandNode.xpath("./text()").get();
            String htmlDealerUrl = brandNode.xpath("./@href").get();
            if (StringUtils.isBlank(htmlDealerUrl)) {
                continue;
            }
            // Brand id is the 4th '/'-separated segment of the brand link.
            String brandId = htmlDealerUrl.split("/")[3];
            Map<String, String> brandMap = new HashMap<>();
            brandMap.put("brandName", brand);
            brandMap.put("brandId", brandId);
            for (Selectable cityNode : allCityNodes) {
                // City label carries a "4S" suffix; city id sits between the leading '/'
                // and an optional '#' fragment of the city link.
                String city = cityNode.xpath("./text()").get().split("4S")[0];
                String cityId = cityNode.xpath("./@href").get().split("/")[1].split("#")[0];
                int curPage = 1;
                String dealerListUrl = String.format(dealerListUrlFormat, cityId, brandId, curPage);
                Map<String, String> addrInfo = new HashMap<>();
                addrInfo.put("city", city);
                Map<String, Object> extras = new HashMap<>();
                extras.put("cityId", cityId);
                extras.put("brandId", brandId);
                // FIX: stored as String (was Integer) so parseDealerListLinks' read of
                // "curPage" is type-consistent across all producers.
                extras.put("curPage", String.valueOf(curPage));
                CrawlerRequestRecord dealerListRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(dealerListUrl)
                        .recordKey(dealerListUrl)
                        .releaseTime(System.currentTimeMillis())
                        .notFilterRecord()
                        .copyBizTags()
                        .build();
                dealerListRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Brand, brandMap);
                dealerListRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Addr_Info, addrInfo);
                dealerListRecord.getHttpRequest().setExtras(extras);
                parsedLinks.add(dealerListRecord);
            }
        }

        return parsedLinks;
    }

    /**
     * After the {@code GetNewsContent} internal download completes, extracts the plain
     * article text from the JSON "content" HTML fragment and stashes it on the parent
     * request as the "content" extra (empty string on any failure, preserving the
     * original best-effort behavior).
     */
    @Override
    public void afterInternalDownload(CrawlerRequestRecord crawlerRecord, List<CrawlerRequestRecord> internalDownloadRecords, List<CrawlerRequestRecord> links) {
        HttpRequest httpRequest = crawlerRecord.getHttpRequest();
        for (CrawlerRequestRecord internalDownloadRecord : internalDownloadRecords) {
            HttpRequest recordHttpRequest = internalDownloadRecord.getHttpRequest();
            if (recordHttpRequest.getUrl().matches(contentUrlRegex)) {
                try {
                    String rawText = internalDownloadRecord.getInternalDownloadPage().getRawText();
                    JSONObject pageObj = JSONObject.parseObject(rawText);
                    String contentHtml = pageObj.getString("content");
                    Html html = new Html(contentHtml);
                    StringBuilder sbContent = new StringBuilder();
                    List<String> contents = html.xpath("//div[@class=\"stencil-text\"]/p//text()|//div[@class=\"dealermain\"]/p//text()").all();
                    for (String content : contents) {
                        sbContent.append(content.trim());
                    }
                    httpRequest.addExtra("content", sbContent.toString());
                } catch (Exception e) {
                    // Best-effort: a malformed payload yields an empty body, not a crash.
                    httpRequest.addExtra("content", "");
                }
            }
        }
    }

    /**
     * Washes an article detail page into a {@link CrawlerData} row: title, body
     * (from the "content" extra filled by {@link #afterInternalDownload}), vendor
     * name/URL/phone/address, breadcrumb path, and promotion period.
     *
     * @return the washed data list, or {@code null} when the page body is blank
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        String requestUrl = httpRequest.getUrl();
        String rawText = httpPage.getRawText();
        if (StringUtils.isBlank(rawText)){
            return null;
        }
        Html html = httpPage.getHtml();
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.article)){
            // e.g. https://dealer.autohome.com.cn/2021459/news_627271557.html
            // -> articleKey = 627271557, authorId = 2021459 (second-to-last path segment).
            String articleKey = extractArticleKey(requestUrl);
            String[] strings = requestUrl.split("/");
            String authorId = strings[strings.length - 2];
            String vendorUrl = String.format("https://dealer.autohome.com.cn/%s/", authorId);

            StringBuilder sbTitle = new StringBuilder();
            for (String s : html.xpath("//h1/text()").all()) {
                sbTitle.append(s);
            }
            String content = (String) extras.get("content");
            String path1st = html.xpath("//a[@class=\"current\"]/text()").get();
            List<String> breadNavs = html.xpath("//div[@class=\"breadnav\"]/p//a/text()").all();
            String path2nd = "";
            // FIX: guard for the element actually read (index 2); the previous
            // `size() > 0` check threw IndexOutOfBounds on 1-2 element breadcrumbs.
            if (breadNavs != null && breadNavs.size() > 2){
                path2nd = breadNavs.get(2);
            }
            List<String> path = new ArrayList<>();
            path.add(path1st);
            path.add(path2nd);
            StringBuilder sbVendor = new StringBuilder();
            for (String s : html.xpath("//ul[@class=\"dealerinfo-ul\"]/li[@class=\"markul\"]/text()").all()) {
                sbVendor.append(s);
            }
            StringBuilder sbVendorAddr = new StringBuilder();
            for (String addr : html.xpath("//ul[@class=\"dealerinfo-ul\"]/li[2]//text()").all()) {
                if (StringUtils.isNotBlank(addr.trim())){
                    sbVendorAddr.append(addr.trim());
                }
            }
            String vendorTel = html.xpath("//ul[@class=\"dealerinfo-ul\"]/li/span[@class=\"red\"]/text()").get();
            String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");

            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), articleKey))
                    .url(requestUrl)
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .addContentKV(Field_Author_Id, authorId)
                    .addContentKV(Field_Content, content)
                    .addContentKV(Field_Title, unescapeHtml2J(sbTitle.toString()))
                    .addContentKV("vendor", unescapeHtml2J(sbVendor.toString()))
                    .addContentKV("vendor_url", vendorUrl)
                    .addContentKV("telephone", vendorTel)
                    .addContentKV("address", sbVendorAddr.toString())
                    // FIX: the condition was inverted (== null ? value : null), which made
                    // promotion_period unconditionally null.
                    .addContentKV("promotion_period", extras.get("promotions") != null ? (String) extras.get("promotions") : null)
                    .resultLabelTag(article)
                    .build();
            crawlerDataList.add(crawlerData);
            crawlerData.tagsCreator().bizTags().addCustomKV(Field_Path, path);
        }
        return crawlerDataList;
    }

    /** Registers every URL pattern this script handles (the AJAX URL is internal-only). */
    @Override
    public void initUrlRegulars() {
        addUrlRegular(indexRegex);
        addUrlRegular(dealerListUrlRegex);
        addUrlRegular(saleListUrlRegex);
        addUrlRegular(newsListUrlRegex);
        addUrlRegular(articleUrlRegex);
    }

    /**
     * Accepts only records tagged with this script's site ("dealer").
     * Comparison is anchored on the non-null constant so a missing site tag
     * yields {@code false} instead of a NullPointerException.
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag();
        String crawlerSite = categoryTag.getKVTagStrVal("site");
        return scriptSite.equalsIgnoreCase(crawlerSite);
    }

    /** No post-execution work is needed for this script. */
    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    /** @return the domain key this script belongs to */
    @Override
    public String domain() {
        return "autohome";
    }

    /**
     * Returns a shallow copy of the extras map so the next record can mutate its own
     * copy without affecting the source record.
     *
     * @param inExtras source map, not modified; must not be null
     * @return a new mutable map with the same entries
     */
    public static Map<String, Object> copyExtras(Map<String, Object> inExtras) {
        return new HashMap<>(inExtras);
    }

    /**
     * Repeatedly HTML-unescapes a string (at most 6 passes) to undo double/triple
     * escaping such as {@code &amp;amp;lt;}. Stops as soon as no entity markers
     * ('&amp;' and ';') remain.
     *
     * @param str the possibly escaped text; {@code null} is returned unchanged
     * @return the unescaped text
     */
    public static String unescapeHtml2J(String str){
        if (str == null) {
            return null;
        }
        int times = 0;
        while (str.contains("&") && str.contains(";")){
            str = StringEscapeUtils.unescapeHtml(str);
            times++;
            // Safety cap: some strings legitimately contain '&' and ';' after a full
            // unescape, which would otherwise loop forever.
            if (times > 5){
                break;
            }
        }
        return str;
    }

}
