package com.chance.cc.crawler.development.scripts.pcauto.article;

import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.CategoryTag;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.article;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.Tag_Field_Addr_Info;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.Tag_Field_Brand;

/**
 * Crawler script for dealer promotion articles on price.pcauto.com.cn.
 * <p>
 * Crawl chain: city index page ({@link #indexRegex}) -&gt; per-city/brand dealer
 * list pages ({@link #dealerListUrlRegex}) -&gt; per-dealer sale/article list
 * pages ({@link #saleListUrlRegex}) -&gt; individual article pages
 * ({@link #articleUrlRegex}), which are washed into {@link CrawlerData}.
 *
 * @author lt
 * @version 1.0
 * @date 2021-02-20 16:01:04
 * @email okprog@sina.com
 */
public class PcAutoDealerArticleCrawlerScript extends CrawlerCommonScript {

    private static final Logger logger = LoggerFactory.getLogger(PcAutoDealerArticleCrawlerScript.class);

    /** City index page, e.g. https://price.pcauto.com.cn/shangjia/c3/ */
    public static final String indexRegex = "https?://price\\.pcauto\\.com\\.cn/shangjia/\\S*/";
    /** Dealer list page for a city/brand: .../shangjia/<cityId>/nb<brandId>/p<pageNo>.html */
    public static final String dealerListUrlRegex = "https?://price\\.pcauto\\.com\\.cn/shangjia/\\S*/nb\\d*/p\\d*\\.html";
    /** Per-dealer sale/article list page: .../<shopId>/p<pageNo>/market.html */
    public static final String saleListUrlRegex = "https?://price\\.pcauto\\.com\\.cn/\\d*/p\\d*/market\\.html";
    /** Single article page: .../market/<shopId>-<articleKey>.html */
    public static final String articleUrlRegex = "https?://price\\.pcauto\\.com\\.cn/market/\\d*-\\d*\\.html";
    // NOTE(review): this regex (and newsContentUrlFormat below) point at
    // dealer.autohome.com.cn and are never registered in initUrlRegulars() —
    // possibly copied from a sibling script. Kept for binary compatibility.
    public static final String contentUrlRegex = "https://dealer\\.autohome\\.com\\.cn/News/GetNewsContent\\S*";

    public static final String dealerListUrlFormat = "https://price.pcauto.com.cn/shangjia/%s/%s/p%s.html"; //cityId brandId pageNo
    public static final String saleListUrlFormat = "https://price.pcauto.com.cn/%s/p%s/market.html";
    public static final String articleUrlFormat = "https://price.pcauto.com.cn/market/%s-%s.html";
    public static final String newsContentUrlFormat = "https://dealer.autohome.com.cn/News/GetNewsContent?dealerId=%s&newsid=%s";

    /** The "site" category tag this script is responsible for. */
    private static final String scriptSite = "dealer";

    /**
     * Dispatches a downloaded page to the matching link parser based on the
     * request URL. Failed downloads are pushed back for retry, except 404s,
     * which are dropped permanently.
     *
     * @return follow-up requests; never {@code null} (empty when nothing matches)
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        int statusCode = httpPage.getStatusCode();
        String lastRequestUrl = lastRequest.getUrl();
        if (StringUtils.isBlank(httpPage.getRawText()) || !httpPage.isDownloadSuccess() || statusCode != 200){
            if (statusCode == 404){
                // Permanently missing page: log the URL (the original logged the
                // status code here, which the message already implies) and drop.
                logger.error("页面不存在：{}", lastRequestUrl);
                return parsedLinks;
            }
            // Transient failure: re-queue the same request, skip washing, and
            // lift the duplicate filter so the retry is not discarded.
            parsedLinks.add(crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
            logger.error("页面下载状态：{}，状态码：{}，内容为空：{}，实行回推",
                    httpPage.isDownloadSuccess(), statusCode, StringUtils.isBlank(httpPage.getRawText()));
            return parsedLinks;
        }
        if (lastRequestUrl.matches(indexRegex)){
            return parseIndexLink(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (lastRequestUrl.matches(dealerListUrlRegex)){
            return parseDealerListLinks(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (lastRequestUrl.matches(saleListUrlRegex)){
            return parseSaleListLinks(crawlerRequestRecord, httpPage, parsedLinks);
        }
        // Article pages (and any unmatched URL) yield no further links.
        // BUGFIX: the original returned null for unmatched URLs.
        return parsedLinks;
    }

    /**
     * Parses a dealer's sale/article list page: emits a next-page request when
     * the page looks full (&gt;= 8 items) plus one item request per article.
     */
    private List<CrawlerRequestRecord> parseSaleListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> listExtras = httpRequest.getExtras();
        Html html = httpPage.getHtml();
        List<Selectable> itemNodes = html.xpath("//div[@class=\"mainleft\"]/ul/li").nodes();
        if (null == itemNodes){
            return parsedLinks;
        }
        String shopId = (String) listExtras.get("shopId");
        if (itemNodes.size() >= 8){
            // A full page suggests there may be a next one.
            int pageNo = Integer.parseInt((String) listExtras.get("pageNo")) + 1;
            // BUGFIX: the original formatted the URL with a literal 1, so
            // page-turning re-requested page 1 forever while the "pageNo"
            // extra kept incrementing.
            String saleListUrl = String.format(saleListUrlFormat, shopId, pageNo);
            CrawlerRequestRecord saleListRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(saleListUrl)
                    .recordKey(saleListUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .build();
            saleListRecord.getHttpRequest().addExtra("pageNo", String.valueOf(pageNo));
            saleListRecord.getHttpRequest().addExtra("shopId", shopId);
            parsedLinks.add(saleListRecord);
        }
        for (Selectable itemNode : itemNodes) {
            String itemUrl = itemNode.xpath("./a/@href").get();
            if (StringUtils.isBlank(itemUrl)){
                continue;
            }
            // Item hrefs appear to look like ".../<shopId>-<articleKey>.html";
            // guard the split so a malformed href cannot abort the whole loop.
            String[] urlParts = itemUrl.split("-");
            if (urlParts.length < 2){
                logger.warn("unexpected item url format: {}", itemUrl);
                continue;
            }
            String articleKey = urlParts[1].split("\\.")[0];
            itemUrl = String.format(articleUrlFormat, shopId, articleKey);
            String pubTime = itemNode.xpath("./div/div[@class=\"tagcont\"]/span/text()").get();
            List<String> promotions = itemNode.xpath("./div/div[@class=\"tagcont\"]/em//text()").all();

            try {
                // pubTime is assumed to be "<label>:<yyyy-MM-dd>"; keep the date.
                pubTime = pubTime.split(":")[1];
                CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(itemUrl)
                        .recordKey(itemUrl)
                        .releaseTime(DateUtils.parseDate(pubTime, "yyyy-MM-dd").getTime())
                        .copyBizTags()
                        .resultLabelTag(article)
                        .needWashed(true)
                        .needParsed(true)
                        .build();
                if (null != promotions && !promotions.isEmpty()){
                    itemRecord.getHttpRequest().addExtra("promotions", String.join("", promotions));
                }
                parsedLinks.add(itemRecord);
            } catch (Exception e){
                // Skip items whose publish time cannot be parsed, but keep the
                // context and cause (the original swallowed both).
                logger.error("parse date error, itemUrl: {}", itemUrl, e);
            }
        }
        return parsedLinks;
    }

    /**
     * Parses a city/brand dealer list page. Only dealers whose status span
     * class ends with "green" are followed; when a page yields &gt;= 10 such
     * dealers a next-page request is emitted as well.
     */
    private List<CrawlerRequestRecord> parseDealerListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> listExtras = httpRequest.getExtras();
        Html html = httpPage.getHtml();
        List<Selectable> itemNodes = html.xpath("//div[@class=\"listTb\"]/ul/li").nodes();
        List<CrawlerRequestRecord> greenLinks = new ArrayList<>();
        for (Selectable itemNode : itemNodes) {
            String saleListUrl = itemNode.xpath("./div[@class=\"divYSd\"]/p[1]/a/@href").get();
            String greenSpan = itemNode.xpath("./div[@class=\"divYSd\"]/p[1]/span[2]/@class").get();
            // BUGFIX: greenSpan may be absent; the original NPE'd on endsWith.
            if (StringUtils.isBlank(saleListUrl) || null == greenSpan || !greenSpan.endsWith("green")){
                continue;
            }
            String[] itemUrlSplit = saleListUrl.split("/");
            String shopId = itemUrlSplit[itemUrlSplit.length - 1];
            int pageNo = 1;
            saleListUrl = String.format(saleListUrlFormat, shopId, pageNo);
            CrawlerRequestRecord saleListRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(saleListUrl)
                    .recordKey(saleListUrl)
                    .releaseTime(System.currentTimeMillis())
                    .notFilterRecord()
                    .copyBizTags()
                    .build();
            saleListRecord.getHttpRequest().addExtra("pageNo", String.valueOf(pageNo));
            saleListRecord.getHttpRequest().addExtra("shopId", shopId);
            greenLinks.add(saleListRecord);
        }
        if (greenLinks.size() >= 10){
            // A page with 10+ qualifying dealers is treated as full: turn the page.
            String cityId = (String) listExtras.get("cityId");
            String brandId = (String) listExtras.get("brandId");
            int curPage = Integer.parseInt((String) listExtras.get("curPage")) + 1;
            String dealerListUrl = String.format(dealerListUrlFormat, cityId, brandId, curPage);
            // Copy the extras so the next-page request does not share the parent's map.
            Map<String, Object> nextListExtras = copyExtras(listExtras);
            nextListExtras.put("curPage", curPage);
            CrawlerRequestRecord dealerListRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(dealerListUrl)
                    .recordKey(dealerListUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .notFilterRecord()
                    .build();
            dealerListRecord.getHttpRequest().setExtras(nextListExtras);
            parsedLinks.add(dealerListRecord);
        }
        parsedLinks.addAll(greenLinks);
        return parsedLinks;
    }

    /**
     * Parses a city index page. For the seed city ("c3") it fans out to every
     * city link; for all pages it emits one dealer-list request per (deduped)
     * hot brand, carrying brand and address info as biz tags.
     */
    private List<CrawlerRequestRecord> parseIndexLink(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        String requestUrl = httpRequest.getUrl();
        String[] urlSplit = requestUrl.split("/");
        String cityId = urlSplit[urlSplit.length - 1];
        Html html = httpPage.getHtml();
        String city = html.xpath("//em[@class=\"select-c\"]/a/text()").get();
        // The seed index page ("c3") links every other city: fan out once.
        if ("c3".equalsIgnoreCase(cityId)){
            List<String> allCities = html.xpath("//div[@class=\"Items cityItems\"]/a[@class=\"tag\"]/@href").all();
            for (String cityUrl : allCities) {
                // Hrefs are protocol-relative ("//price.pcauto..."): add the scheme.
                if (!cityUrl.startsWith("http")){
                    cityUrl = "https:" + cityUrl;
                }
                CrawlerRequestRecord cityRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(cityUrl)
                        .recordKey(cityUrl)
                        .releaseTime(System.currentTimeMillis())
                        .notFilterRecord()
                        .copyBizTags()
                        .build();
                parsedLinks.add(cityRecord);
            }
        }
        List<Selectable> brandNodes = html.xpath("//div[@id=\"hotDiv\"]/p[@class=\"clearfix brandDiv\"]/a[@class=\"tag\"]").nodes();
        // Deduplicate brand entries by name (last occurrence wins).
        Map<String, Selectable> duplicateMap = new HashMap<>();
        for (Selectable brandNode : brandNodes) {
            String brandName = brandNode.xpath("./text()").get();
            duplicateMap.put(brandName, brandNode);
        }
        for (Map.Entry<String, Selectable> selectableEntry : duplicateMap.entrySet()) {
            String brandName = selectableEntry.getKey();
            Selectable brandNode = selectableEntry.getValue();
            String brandUrl = brandNode.xpath("./@href").get();
            String[] brandUrls = brandUrl.split("/");
            String brandId = brandUrls[brandUrls.length - 1];
            int pageNo = 1;
            String dealerListUrl = String.format(dealerListUrlFormat, cityId, brandId, pageNo);
            Map<String, String> brandMap = new HashMap<>();
            brandMap.put("brandName", brandName);
            brandMap.put("brandId", brandId);
            Map<String, String> addrInfo = new HashMap<>();
            addrInfo.put("city", city);
            Map<String, Object> extras = new HashMap<>();
            extras.put("cityId", cityId);
            extras.put("brandId", brandId);
            extras.put("curPage", pageNo);
            CrawlerRequestRecord dealerListRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(dealerListUrl)
                    .recordKey(dealerListUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .notFilterRecord()
                    .build();
            dealerListRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Brand, brandMap);
            dealerListRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Addr_Info, addrInfo);
            dealerListRecord.getHttpRequest().setExtras(extras);
            parsedLinks.add(dealerListRecord);
        }
        return parsedLinks;
    }

    /**
     * Washes an article page into a single {@link CrawlerData}: title, content,
     * breadcrumb path, and the dealer's name / address / telephone.
     *
     * @return a one-element list on success, {@code null} when the page is
     *         blank or the record is not tagged as an article
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        String requestUrl = httpRequest.getUrl();
        if (StringUtils.isBlank(httpPage.getRawText())){
            return null;
        }
        Html html = httpPage.getHtml();
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.article)){
            // Article URLs are built by this script as .../market/<authorId>-<articleKey>.html
            // (see articleUrlFormat), so both splits below are well-defined.
            String articleKey = requestUrl.split("-")[1].split("\\.")[0];
            String authorId = requestUrl.substring(requestUrl.lastIndexOf("/") + 1).split("-")[0];
            String vendorUrl = String.format("https://price.pcauto.com.cn/%s/", authorId);

            String title = html.xpath("//div[@class=\"mainleft\"]/p/text()").get();
            String content = html.xpath("//div[@class=\"mainleft\"]/span[@class=\"mosmtext\"]/text()").get();
            String path1st = html.xpath("//div[@class=\"metros\"]/a[4]/text()").get();
            // NOTE(review): the second path level intentionally mirrors the first
            // in the original; this site has no deeper breadcrumb — confirm.
            String path2nd = path1st;

            List<String> path = new ArrayList<>();
            path.add(path1st);
            path.add(path2nd);
            String vendor = html.xpath("//div[@class=\"topleftpt\"]/p[@class=\"tit\"]/@title").get();
            List<String> vendorAddress = html.xpath("//div[@class=\"topleftpt\"]/div[@class=\"otlisttl\"]/div[@class=\"modeali\"]//text()").all();
            StringBuilder sbVendorAddr = new StringBuilder();
            for (String addr : vendorAddress) {
                if (StringUtils.isNotBlank(addr.trim())){
                    sbVendorAddr.append(addr.trim());
                }
            }
            String vendorTel = html.xpath("//div[@class=\"topleftpt\"]/div/i/strong[@class=\"red\"]/text()").get();
            String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");

            // BUGFIX: the original ternary was inverted — it stored null when the
            // "promotions" extra was absent and "" when it was present.
            String promotionPeriod = extras.get("promotions") == null ? "" : (String) extras.get("promotions");
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), articleKey))
                    .url(requestUrl)
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .addContentKV(Field_Author_Id, authorId)
                    .addContentKV(Field_Content, content)
                    .addContentKV(Field_Title, title)
                    .addContentKV("vendor", vendor)
                    .addContentKV("vendor_url", vendorUrl)
                    .addContentKV("telephone", vendorTel)
                    .addContentKV("address", sbVendorAddr.toString())
                    .addContentKV("promotion_period", promotionPeriod)
                    .resultLabelTag(article)
                    .build();
            crawlerDataList.add(crawlerData);
            crawlerData.tagsCreator().bizTags().addCustomKV(Field_Path, path);
            return crawlerDataList;
        }
        return null;
    }

    /** Registers the URL patterns this script handles (article content regex excluded on purpose). */
    @Override
    public void initUrlRegulars() {
        addUrlRegular(indexRegex);
        addUrlRegular(dealerListUrlRegex);
        addUrlRegular(saleListUrlRegex);
        addUrlRegular(articleUrlRegex);
    }

    /**
     * @return true when the record's "site" category tag matches this script.
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag();
        String crawlerSite = categoryTag.getKVTagStrVal("site");
        // BUGFIX: compare from the constant so a missing "site" tag yields
        // false instead of a NullPointerException.
        return scriptSite.equalsIgnoreCase(crawlerSite);
    }

    /** No post-processing needed for this script. */
    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    @Override
    public String domain() {
        return "pcauto";
    }

    /**
     * Shallow-copies a request-extras map so a next-page request does not
     * share (and accidentally mutate) its parent's extras.
     *
     * @param inExtras source map; must not be null
     * @return an independent mutable copy
     */
    public static Map<String, Object> copyExtras(Map<String, Object> inExtras){
        return new HashMap<>(inExtras);
    }
}
