package com.chance.cc.crawler.development.scripts.bitauto.article;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpConstant;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.CategoryTag;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.ParseException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.article;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Field_Path;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.Tag_Field_Addr_Info;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.Tag_Field_Brand;

/**
 * Crawler script for yiche.com (bitauto) dealer articles.
 *
 * <p>Crawl flow: city list JSON → per-city brand tree → dealer list pages →
 * per-dealer news/sale list pages → individual article pages, which are then
 * washed into {@link CrawlerData} records tagged as {@code article}.
 *
 * @author lt
 * @version 1.0
 * @date 2021-03-09 14:51:08
 * @email okprog@sina.com
 */
public class BitAutoDealerArticleCrawlerScript extends CrawlerCommonScript {

    private static final Logger logger = LoggerFactory.getLogger(BitAutoDealerArticleCrawlerScript.class);

    public static final String dealerIndexRegex = "https://dealer\\.yiche\\.com/";
    public static final String cityUrlRegex = "https://cmsapi\\.bitauto\\.com/city/getcity\\.ashx\\?requesttype=json&bizCity=1";
    public static final String brandUrlRegex = "https://apicar\\.bitauto\\.com/CarInfo/getlefttreejson\\.ashx\\S*";
    public static final String dealerListUrlRegex = "https://dealer\\.yiche\\.com/\\w*/\\S*/\\?BizModes=0&page=\\d*";
    // Fixed: the dot before "html" must be escaped to match a literal '.', like articleUrlRegex below.
    public static final String newsListUrlRegex = "https://dealer\\.yiche\\.com/\\d*/news_\\d*\\.html\\?page=\\d*";
    public static final String articleUrlRegex = "https://dealer\\.yiche\\.com/\\d*/news/\\d*/\\d*\\.html";

    public static final String brandUrlFormat = "https://apicar.bitauto.com/CarInfo/getlefttreejson.ashx?tagtype=jingxiaoshang&citycode=%s&cityid=%s";
    public static final String dealerListUrlFormat = "https://dealer.yiche.com/%s/%s/?BizModes=0&page=%s";
    public static final String saleListUrlFormat = "https://dealer.yiche.com/%s/news_2.html?page=%s";
    public static final String newsListUrlFormat = "https://dealer.yiche.com/%s/news_1.html?page=%s";
    public static final String articleUrlFormat = "https://dealer.yiche.com/%s/news/%s/%s.html";

    private static final String scriptSite = "dealer";

    // Pre-compiled patterns for extracting dealer metadata embedded in inline page JS.
    // Hoisted to static finals so washPage does not recompile them on every call.
    private static final Pattern VENDOR_PATTERN = Pattern.compile("DealerName: \"\\S*\",");
    private static final Pattern ADDR_PATTERN = Pattern.compile("Address: \"\\S*\",");
    private static final Pattern TEL_PATTERN = Pattern.compile("Tel: \"(.*)\"");
    private static final Pattern CITY_PATTERN = Pattern.compile("CityName: \"\\S*\",");

    /**
     * Expands the city-list support record into per-city brand-tree requests.
     *
     * <p>Falls back to the default behavior when no support records exist or
     * nothing could be parsed from the city JSON.
     *
     * @param requestRecord        the seed request being prepared
     * @param supportSourceRecords support records; index 0 is expected to hold the city-list response
     * @return brand-tree requests (currently at most one — see note below), or the superclass default
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> allBrandRecords = new ArrayList<>();
        if (supportSourceRecords == null || supportSourceRecords.size() < 1) {
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        CrawlerRequestRecord cityRecord = supportSourceRecords.get(0);
        String cityUrl = cityRecord.getHttpRequest().getUrl();
        if (cityUrl.matches(cityUrlRegex)) {
            try {
                JSONArray cities = JSONArray.parseArray(cityRecord.getInternalDownloadPage().getRawText());
                if (null != cities && cities.size() > 0) {
                    for (Object city : cities) {
                        JSONObject cityObj = (JSONObject) city;
                        String cityPinYin = cityObj.getString("cityPinYin");
                        String cityId = cityObj.getString("cityId");
                        String brandUrl = String.format(brandUrlFormat, cityPinYin, cityId);
                        CrawlerRequestRecord brandRecord = CrawlerRequestRecord.builder()
                                .turnPageRequest(requestRecord)
                                .httpUrl(brandUrl)
                                .recordKey(brandUrl)
                                .releaseTime(System.currentTimeMillis())
                                .notFilterRecord()
                                .copyBizTags()
                                .build();
                        HttpRequest brandRequest = brandRecord.getHttpRequest();
                        brandRequest.addExtra("cityId", cityId);
                        brandRequest.addExtra("cityPinYin", cityPinYin);
                        allBrandRecords.add(brandRecord);
                        // NOTE(review): only the FIRST city is ever expanded — presumably an
                        // intentional volume cap; confirm before removing this break.
                        break;
                    }
                }
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            }
        }
        if (allBrandRecords.isEmpty()) {
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        return allBrandRecords;
    }

    /**
     * Dispatches a downloaded page to the matching link parser based on its URL.
     *
     * <p>Failed downloads (non-200, empty body) are pushed back for retry,
     * except 404 which is dropped. URLs matching none of the list regexes
     * yield {@code null} (kept as-is: downstream framework behavior for
     * null vs. empty is not visible here).
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        int statusCode = httpPage.getStatusCode();
        String lastRequestUrl = lastRequest.getUrl();
        if (StringUtils.isBlank(httpPage.getRawText()) || !httpPage.isDownloadSuccess() || statusCode != 200) {
            if (statusCode == 404) {
                // Fixed: log the URL of the missing page, not the (already known) status code.
                logger.error("页面不存在：{}", lastRequestUrl);
                return parsedLinks;
            }
            // Push the record back (re-crawl) without washing.
            parsedLinks.add(crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
            logger.error("页面下载状态：{}，状态码：{}，内容为空：{}，实行回推", httpPage.isDownloadSuccess(), statusCode, StringUtils.isBlank(httpPage.getRawText()));
            return parsedLinks;
        }
        if (lastRequestUrl.matches(brandUrlRegex)) {
            return parseBrandLinks(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (lastRequestUrl.matches(dealerListUrlRegex)) {
            return parseDealerListLinks(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (lastRequestUrl.matches(newsListUrlRegex)) {
            return parseNewsListLinks(crawlerRequestRecord, httpPage, parsedLinks);
        }
        return null;
    }

    /**
     * Parses a dealer news/sale list page: queues the next list page (when a
     * pager is present) and one article item request per list entry.
     * Items with a missing link or an unparseable publish date are skipped
     * instead of aborting the whole page.
     */
    private List<CrawlerRequestRecord> parseNewsListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        String dealerId = (String) extras.get("dealerId");
        Html html = httpPage.getHtml();
        List<Selectable> itemNodes = html.xpath("//div[@class=\"mov_news\"]/ul/li").nodes();
        if (null != itemNodes && itemNodes.size() > 0) {
            String pager = html.xpath("//div[@id=\"pager\"]/a/text()").get();
            if (StringUtils.isNotBlank(pager)) {
                Map<String, Object> urlParams = getUrlParams(httpRequest.getUrl());
                int page = Integer.parseInt((String) urlParams.get("page"));
                page++;
                // news_2 is the "sale" flavour of the list, news_1 the regular news list.
                String nextNewsListUrl;
                if (httpRequest.getUrl().contains("news_2")) {
                    nextNewsListUrl = String.format(saleListUrlFormat, dealerId, page);
                } else {
                    nextNewsListUrl = String.format(newsListUrlFormat, dealerId, page);
                }
                CrawlerRequestRecord nextNewsListRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(nextNewsListUrl)
                        .recordKey(nextNewsListUrl)
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .build();
                nextNewsListRecord.getHttpRequest().setExtras(copyExtras(extras));
                parsedLinks.add(nextNewsListRecord);
            }
            for (Selectable itemNode : itemNodes) {
                String articleUrl = itemNode.xpath("./h3/a/@href").get();
                // Robustness: a list item without a link cannot be turned into a request.
                if (StringUtils.isBlank(articleUrl)) {
                    continue;
                }
                String[] split = articleUrl.split("/");
                String articleUrlDate = split[split.length - 2];
                String articleKey = split[split.length - 1].split("\\.")[0];
                String itemUrl = String.format(articleUrlFormat, dealerId, articleUrlDate, articleKey);
                String pubTime = itemNode.xpath("./div[contains(@class,\"news_txt\")]/p/span/text()").get();
                String promotions = itemNode.xpath("./h3/span[@class=\"s_time\"]/text()").get();
                // Robustness: DateUtils.parseDate(null, ...) throws IllegalArgumentException,
                // which the ParseException catch below would not cover — skip such items.
                if (StringUtils.isBlank(pubTime)) {
                    continue;
                }
                try {
                    CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                            .itemPageRequest(crawlerRequestRecord)
                            .httpUrl(itemUrl)
                            .recordKey(itemUrl)
                            .releaseTime(DateUtils.parseDate(pubTime, "yyyy-MM-dd").getTime())
                            .copyBizTags()
                            .resultLabelTag(article)
                            .needParsed(false)
                            .needWashed(true)
                            .build();
                    if (StringUtils.isNotBlank(promotions)) {
                        itemRecord.getHttpRequest().addExtra("promotions", promotions);
                    }
                    parsedLinks.add(itemRecord);
                } catch (ParseException e) {
                    logger.error("parse date error");
                }
            }
        }

        return parsedLinks;
    }

    /**
     * Parses a city/brand dealer-list page: queues the next list page (when the
     * "next" pager link exists) plus a sale-list and a news-list request for
     * every dealer found on the page.
     */
    private List<CrawlerRequestRecord> parseDealerListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        Html html = httpPage.getHtml();
        List<Selectable> dealerNodes = html.xpath("//div[@class=\"row dealer-list\"]").nodes();
        if (null != dealerNodes && dealerNodes.size() > 0) {
            String next = html.xpath("//div[@class=\"pagination\"]//a[@class=\"next_on\"]/text()").get();
            if (StringUtils.isNotBlank(next)) {
                String cityPinYin = (String) extras.get("cityPinYin");
                String brandPinYin = (String) extras.get("brandPinYin");
                Map<String, Object> urlParams = getUrlParams(httpRequest.getUrl());
                int page = Integer.parseInt((String) urlParams.get("page"));
                String nextDealerListUrl = String.format(dealerListUrlFormat, cityPinYin, brandPinYin, (page + 1));
                CrawlerRequestRecord dealerListRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(nextDealerListUrl)
                        .recordKey(nextDealerListUrl)
                        .releaseTime(System.currentTimeMillis())
                        .notFilterRecord()
                        .copyBizTags()
                        .build();
                dealerListRecord.getHttpRequest().setExtras(copyExtras(extras));
                parsedLinks.add(dealerListRecord);
            }
            for (Selectable dealerNode : dealerNodes) {
                String dealerUrl = dealerNode.xpath("./div/h6/a/@href").get();
                String[] split = dealerUrl.split("/");
                // Dealer URLs end with ".../<dealerId>/", so the id is the second-to-last segment.
                String dealerId = split[split.length - 2];
                String saleListUrl = String.format(saleListUrlFormat, dealerId, 1);
                CrawlerRequestRecord saleListRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(saleListUrl)
                        .recordKey(saleListUrl)
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .notFilterRecord()
                        .build();
                saleListRecord.getHttpRequest().addExtra("dealerId", dealerId);
                parsedLinks.add(saleListRecord);

                String newsListUrl = String.format(newsListUrlFormat, dealerId, 1);
                CrawlerRequestRecord newsListRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(newsListUrl)
                        .recordKey(newsListUrl)
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .notFilterRecord()
                        .build();
                newsListRecord.getHttpRequest().addExtra("dealerId", dealerId);
                parsedLinks.add(newsListRecord);
            }
        }

        return parsedLinks;
    }

    /**
     * Parses the JSONP brand-tree response into first-page dealer-list requests,
     * one per brand that has at least one dealer ({@code num > 0}).
     * On any parse failure the original record is pushed back for retry.
     */
    private List<CrawlerRequestRecord> parseBrandLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        String cityPinYin = (String) extras.get("cityPinYin");

        String rawText = httpPage.getRawText();
        try {
            // The response is JSONP: strip the callback wrapper to get the JSON payload.
            int start = rawText.indexOf("(");
            int end = rawText.lastIndexOf(")");
            String substring = rawText.substring(start + 1, end);
            Map resultMap = JSON.parseObject(substring, Map.class);
            JSONObject pageObj = JSONObject.parseObject(JSON.toJSONString(resultMap));
            JSONObject allBrand = pageObj.getJSONObject("brand");
            Set<String> allKeys = allBrand.keySet();
            for (String key : allKeys) {
                JSONArray brands = allBrand.getJSONArray(key);
                for (Object brand : brands) {
                    JSONObject brandObj = (JSONObject) brand;
                    String brandName = brandObj.getString("name");
                    String url = brandObj.getString("url");
                    int num = brandObj.getIntValue("num");
                    // Skip brands that have no dealers in this city.
                    if (num == 0) {
                        continue;
                    }
                    // url looks like "/<cityPinYin>/<brandPinYin>/"; extract the brand slug.
                    String brandPinYin = url.split(cityPinYin)[1].replace("/", "");
                    String dealerListUrl = String.format(dealerListUrlFormat, cityPinYin, brandPinYin, 1);
                    CrawlerRequestRecord dealerListRecord = CrawlerRequestRecord.builder()
                            .turnPageRequest(crawlerRequestRecord)
                            .httpUrl(dealerListUrl)
                            .recordKey(dealerListUrl)
                            .releaseTime(System.currentTimeMillis())
                            .notFilterRecord()
                            .copyBizTags()
                            .build();
                    dealerListRecord.getHttpRequest().addExtra("cityPinYin", cityPinYin);
                    dealerListRecord.getHttpRequest().addExtra("brandPinYin", brandPinYin);
                    dealerListRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Brand, brandName);
                    parsedLinks.add(dealerListRecord);
                }
            }
        } catch (Exception e) {
            logger.error("brand page parse error");
            parsedLinks.add(crawlerRequestRecord);
        }
        return parsedLinks;
    }

    /**
     * Washes an article page into a {@link CrawlerData}: extracts title, body
     * text, breadcrumb path, and dealer info (name/address/phone/city) that the
     * site embeds in inline JavaScript.
     *
     * @return a single-element list, an empty list for non-article records,
     *         or {@code null} when the page body is blank (kept for framework compatibility)
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        String requestUrl = httpRequest.getUrl();
        String rawText = httpPage.getRawText();
        if (StringUtils.isBlank(rawText)) {
            return null;
        }
        Html html = httpPage.getHtml();
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.article)) {
            // URL shape: https://dealer.yiche.com/<dealerId>/news/<date>/<articleKey>.html
            String articleKey = requestUrl.substring(requestUrl.lastIndexOf("/") + 1).split("\\.")[0];
            String[] strings = requestUrl.split("/");
            String authorId = strings[strings.length - 4];
            String vendorUrl = String.format("https://dealer.yiche.com/%s/", authorId);

            String title = html.xpath("//h1[@class=\"ad\"]/text()").get();
            List<String> allContents = html.xpath("//div[@class=\"article\"]//p//text()").all();
            // StringBuilder: no concurrent access here, no need for StringBuffer's locking.
            StringBuilder sbContent = new StringBuilder();
            for (String allContent : allContents) {
                sbContent.append(allContent);
            }

            // Breadcrumb path: current section + brand (brand text may carry a trailing suffix after a space).
            String path1st = html.xpath("//li[contains(@class,\"current\")]/a/text()").get();
            String path2nd = html.xpath("//div[@class=\"brand\"]/a[2]/text()").get();
            if (path2nd != null && path2nd.contains(" ")) {
                path2nd = path2nd.split(" ")[0];
            }
            List<String> path = new ArrayList<>();
            path.add(path1st);
            path.add(path2nd);

            // Dealer metadata lives in inline JS variables; the last match wins.
            String vendor = "";
            String address = "";
            String vendorTel = "";
            String city = "";
            Matcher mtVendor = VENDOR_PATTERN.matcher(rawText);
            Matcher mtAddr = ADDR_PATTERN.matcher(rawText);
            Matcher mtTel = TEL_PATTERN.matcher(rawText);
            Matcher mtCity = CITY_PATTERN.matcher(rawText);

            while (mtVendor.find()) {
                vendor = mtVendor.group(0).split("\"")[1];
            }

            while (mtAddr.find()) {
                address = mtAddr.group(0).split("\"")[1];
            }

            while (mtTel.find()) {
                vendorTel = mtTel.group(0).split("\"")[1];
            }

            while (mtCity.find()) {
                city = mtCity.group(0).split("\"")[1];
            }

            String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
            Map<String, String> addrInfo = new HashMap<>();
            addrInfo.put("city", city);

            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), articleKey))
                    .url(requestUrl)
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .addContentKV(Field_Author_Id, authorId)
                    .addContentKV(Field_Content, sbContent.toString())
                    .addContentKV(Field_Title, unescapeHtml2J(title))
                    .addContentKV("vendor", vendor)
                    .addContentKV("vendor_url", vendorUrl)
                    .addContentKV("telephone", vendorTel)
                    .addContentKV("address", address)
                    // Fixed: the original ternary was inverted (== null), so the promotion
                    // period was ALWAYS null even when the extra was present.
                    .addContentKV("promotion_period", extras.get("promotions") != null ? (String) extras.get("promotions") : null)
                    .resultLabelTag(article)
                    .build();
            crawlerData.tagsCreator().bizTags().addCustomKV(Field_Path, path);
            crawlerData.tagsCreator().bizTags().addCustomKV(Tag_Field_Addr_Info, addrInfo);
            crawlerDataList.add(crawlerData);
        }
        return crawlerDataList;
    }

    /** Registers every URL pattern this script handles. */
    @Override
    public void initUrlRegulars() {
        addUrlRegular(dealerIndexRegex);
        addUrlRegular(cityUrlRegex);
        addUrlRegular(brandUrlRegex);
        addUrlRegular(dealerListUrlRegex);
        addUrlRegular(newsListUrlRegex);
        addUrlRegular(articleUrlRegex);
    }

    /**
     * Accepts only records whose "site" category tag is {@code dealer}.
     * Comparison is anchored on the non-null constant so a record without a
     * site tag yields {@code false} instead of an NPE.
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag();
        String crawlerSite = categoryTag.getKVTagStrVal("site");
        return scriptSite.equalsIgnoreCase(crawlerSite);
    }

    /** No post-execution cleanup required for this script. */
    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    @Override
    public String domain() {
        return "bitauto";
    }

    /**
     * Shallow-copies a request-extras map so a new record does not share
     * mutable state with its parent request.
     *
     * @param inExtras source map; must not be null (callers always pass a live extras map)
     * @return a new independent {@link HashMap} with the same entries
     */
    public static Map<String, Object> copyExtras(Map<String, Object> inExtras) {
        // HashMap copy constructor does the same shallow copy as the old manual loop.
        return new HashMap<>(inExtras);
    }

    /**
     * 将url参数转换成map
     *
     * @param url http://*.*.com?aa=11&bb=22&cc=33
     * @return map of query parameters, or {@code null} when the URL has no
     *         query string (callers only invoke this on URLs matched by
     *         regexes that guarantee a {@code page=} parameter)
     */
    private Map<String, Object> getUrlParams(String url) {
        Map<String, Object> map = new HashMap<String, Object>(0);
        String param = null;
        if (url.contains("?")) {
            param = url.split("\\?")[1];
        }
        if (StringUtils.isBlank(param)) {
            return null;
        }
        String[] params = param.split("&");
        for (String s : params) {
            String[] p = s.split("=");
            // Tokens without exactly one '=' (flags, malformed pairs) are ignored.
            if (p.length == 2) {
                map.put(p[0], p[1]);
            }
        }
        return map;
    }

    /**
     * Repeatedly HTML-unescapes a string (titles are sometimes double-encoded,
     * e.g. {@code &amp;amp;}), capped at 6 passes.
     *
     * @param str possibly-encoded string; null is returned as-is (the original NPE'd on null)
     * @return the fully decoded string
     */
    public static String unescapeHtml2J(String str) {
        if (str == null) {
            return null;
        }
        int times = 0;
        while (str.contains("&") && str.contains(";")) {
            String decoded = StringEscapeUtils.unescapeHtml(str);
            if (decoded.equals(str)) {
                // Nothing left to decode ("A & B;" has no entities) — the old loop
                // spun through all remaining passes for no effect.
                break;
            }
            str = decoded;
            times++;
            if (times > 5) {
                break;
            }
        }
        return str;
    }

}
