package com.chance.cc.crawler.development.scripts.bitauto.dealer;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.development.scripts.allfeild.AICCommonField;
import com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Crawler script for dealer promotion articles on dealer.yiche.com (BitAuto 易车).
 * Seeds dealer list pages from keyword records, follows them to per-dealer news
 * lists and article detail pages, and washes each article into a CrawlerData record.
 *
 * @author ding
 * @version 1.0
 * @since 2021/9/12
 **/
public class BitAutoDealerCrawlerScript extends CrawlerCommonScript {

    private static final Logger log = LoggerFactory.getLogger(BitAutoDealerCrawlerScript.class);

    private static final String domain = "bitauto";
    private static final String site = "bitAutoDealer";
    private static final String RECORD_AGAIN_REQUEST = "record_again_request";
    private static final String HTTPS = "https:";

    /** Give up re-queuing a failed download after this many attempts. */
    private static final int MAX_RETRIES = 10;

    private static final String bitAuto = "https://dealer.yiche.com/";
    private static final String bitAutoUrl = "https://dealer.yiche.com";

    // URL patterns (java regex) describing the page types this script handles.
    private static final String dealerUrl = "https://dealer.yiche.com/\\S*/\\S*/";
    private static final String dealerUrlRegulars = "https://dealer.yiche.com/\\S*/\\S*/\\?BizModes=0&page=\\S*";
    private static final String articleListUrl = "https://dealer.yiche.com/\\S*/news.html";
    private static final String articleUrl = "https://dealer.yiche.com/\\S*/news/\\S*/\\S*.html";
    private static final String pageUrl = "https://dealer.yiche.com/\\S*/\\S*.html\\?page=\\S*";
    private static final String photoRegulars = "http://frontapi.easypass.cn/eilv3/das2.ashx\\?userid=\\S*&mediaid=10&source=netease";

    // photoId -> dealer phone number, filled by afterInternalDownload and consumed by
    // washArticle. ConcurrentHashMap because crawler callbacks may run on multiple
    // worker threads (the original non-final HashMap was not safe for that).
    private static final Map<String, String> phoneByPhotoId = new ConcurrentHashMap<>();
    // Phone / photoId of the shop currently being washed; used to evict the previous
    // shop's cache entry once washing moves on to a different shop.
    // NOTE(review): these statics are still racy if washPage runs concurrently — confirm
    // the framework's threading model before relying on the eviction.
    private static String currentShopPhone = null;
    private static String currentPhotoId = null;

    @Override
    public void initUrlRegulars() {
        addUrlRegular(bitAuto);
        addUrlRegular(dealerUrl);
        addUrlRegular(articleListUrl);
        addUrlRegular(articleUrl);
        addUrlRegular(pageUrl);
        addUrlRegular(dealerUrlRegulars);
        addUrlRegular(photoRegulars);
    }

    /**
     * Builds the seed dealer-list requests from the keyword source records.
     * Each keyword entry carries a city plus an array of series ids; for every
     * (city, series) pair one "?BizModes=0&page=1" list request is created.
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> listRecord = new ArrayList<>();
        if (supportSourceRecords == null) {
            return listRecord;
        }
        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
            String url = supportSourceRecord.getHttpRequest().getUrl();
            if (!url.contains("keys")) {
                continue;
            }
            HttpPage internalDownloadPage = supportSourceRecord.getInternalDownloadPage();
            Json json = internalDownloadPage.getJson();
            String msg = json.jsonPath($_type + ".msg").get();
            if (!"success".equals(msg)) {
                log.error("keyword page [{}] download error!", internalDownloadPage.getRequest().getUrl());
                return listRecord;
            }
            List<String> all = json.jsonPath($_type + ".content").all();
            for (String data : all) {
                JSONObject jsonObject = JSONObject.parseObject(data);
                JSONObject keyword = jsonObject.getJSONObject("keyword");
                String city = keyword.getString("city");
                JSONArray series = keyword.getJSONArray("series");
                for (int i = 0; i < series.size(); i++) {
                    int val = 1; // crawling always starts from page 1
                    String seriesId = series.getString(i);
                    String startUrl = bitAuto + city + "/" + seriesId + "/" + "?BizModes=0&page=" + val;
                    CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                            .itemPageRequest(requestRecord)
                            .httpUrl(startUrl)
                            .releaseTime(System.currentTimeMillis())
                            .copyResultTags()
                            .copyBizTags()
                            .build();
                    record.tagsCreator().bizTags().addCustomKV("pageSize", val);
                    listRecord.add(record);
                }
            }
        }
        return listRecord;
    }

    /**
     * Routes a downloaded page to the matching parser. Failed downloads are
     * re-queued via {@link #recordAgainRequest}, except for 404 which is permanent.
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> parseLinks = new ArrayList<>();
        if (page.getStatusCode() != 200 || !page.isDownloadSuccess()) {
            log.error("download failed, statusCode = {}, url = {}",
                    page.getStatusCode(), crawlerRecord.getHttpRequest().getUrl());
            if (page.getStatusCode() != 404) {
                recordAgainRequest(crawlerRecord, parseLinks);
            }
            crawlerRecord.setNeedWashPage(false);
            return parseLinks;
        }
        String url = crawlerRecord.getHttpRequest().getUrl();
        // Short-circuit || (the original used bitwise |, which always evaluated
        // the second regex even when the first one matched).
        if (url.matches(dealerUrl) || url.matches(dealerUrlRegulars)) {
            this.parseList(crawlerRecord, page, parseLinks);
        }
        if (url.matches(articleListUrl) || url.matches(pageUrl)) {
            this.parseArticle(crawlerRecord, page, parseLinks);
        }
        return parseLinks;
    }

    /**
     * Extracts article detail links from a dealer news-list page and, when a
     * pager is present, queues the next list page.
     */
    private void parseArticle(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        List<Selectable> nodes = page.getHtml().xpath("/html/body/div[4]/div[2]/div[2]/ul/li").nodes();
        for (Selectable node : nodes) {
            String text = node.xpath("./h3/span/text()").get();
            if (text == null) {
                // Unexpected markup: bail out like the original implementation did.
                return;
            }
            String url = bitAutoUrl + node.xpath("./h3/a/@href").get();
            CrawlerRequestRecord articleRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(url)
                    .recordKey(url)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            parseLinks.add(articleRecord);
        }
        List<Selectable> pagerNodes = page.getHtml().xpath("//*[@id=\"pager\"]/a").nodes();
        // No articles or no pager links: nothing further to queue. (The original
        // only null-checked pagerNodes and could throw IndexOutOfBoundsException
        // on an empty pager list.)
        if (nodes.isEmpty() || pagerNodes == null || pagerNodes.isEmpty()) {
            return;
        }
        String nextPageUrl = bitAutoUrl + pagerNodes.get(pagerNodes.size() - 1).xpath("./@href").get();
        CrawlerRequestRecord pageRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(nextPageUrl)
                .recordKey(nextPageUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        parseLinks.add(pageRecord);
    }

    /**
     * Parses a dealer-list page: for each dealer queues (a) an internal-download
     * request against the easypass phone API and (b) the dealer's news page,
     * then queues the next list page when a pagination link exists.
     */
    private void parseList(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        List<Selectable> nodes = page.getHtml().xpath("/html/body/div[3]/div[2]/div[1]/div[3]/div[3]/div").nodes();
        // Null check must come first (the original dereferenced before the null test).
        if (nodes == null || nodes.isEmpty()) {
            return;
        }

        String brand = page.getHtml().xpath("/html/body/div[3]/div[2]/div[1]/div[1]/div/div/a[3]/text()").get();
        String city = page.getHtml().xpath("/html/body/div[3]/div[2]/div[1]/div[1]/div/div/strong/text()").get();
        for (Selectable node : nodes) {
            String detailHref = node.xpath("./div[1]/h6/a/@href").get();
            if (detailHref == null) {
                return;
            }
            // Hidden input value is the id used by the phone-lookup API.
            String photoId = node.xpath("./div[1]/p[@class=\"tel\"]/input/@value").get();
            String photoUrl = "http://frontapi.easypass.cn/eilv3/das2.ashx?userid=" + photoId + "&mediaid=10&source=netease";
            CrawlerRequestRecord photoRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(photoUrl)
                    .recordKey(photoUrl)
                    .releaseTime(System.currentTimeMillis())
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            photoRecord.tagsCreator().bizTags().addCustomKV("photoId", photoId);
            parseLinks.add(photoRecord);

            // Drop any query string, then point at the dealer's news page.
            String url = HTTPS + detailHref.split("\\?")[0] + "news.html";
            CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(url)
                    .releaseTime(System.currentTimeMillis())
                    .recordKey(url)
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            record.tagsCreator().bizTags().addCustomKV("brand", brand);
            record.tagsCreator().bizTags().addCustomKV("city", city);
            record.tagsCreator().bizTags().addCustomKV("photoId", photoId);
            parseLinks.add(record);
        }
        List<Selectable> pageNode = page.getHtml().xpath("/html/body/div[3]/div[2]/div[1]/div[3]/div[3]/div[@class=\"pagination\"]/div/a").nodes();
        if (pageNode.isEmpty()) {
            return;
        }
        // Last pagination anchor is the "next page" link.
        String pageHref = pageNode.get(pageNode.size() - 1).xpath("./@href").get();
        if (pageHref != null) {
            String listUrl = (bitAutoUrl + pageHref).replaceAll("amp;", "");
            CrawlerRequestRecord listRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(listUrl)
                    .recordKey(listUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            parseLinks.add(listRecord);
        }
    }

    /**
     * Caches the dealer phone number returned by the easypass phone API.
     * The number sits after the {@code "tel":"} marker in the raw response;
     * the trailing 4 characters are closing JSON punctuation and are stripped,
     * matching the original extraction.
     */
    public void afterInternalDownload(CrawlerRequestRecord crawlerRecord, List<CrawlerRequestRecord> internalDownloadRecords, List<CrawlerRequestRecord> links) {
        for (CrawlerRequestRecord internalDownloadRecord : internalDownloadRecords) {
            if (!internalDownloadRecord.getHttpRequest().getUrl().matches(photoRegulars)) {
                continue;
            }
            HttpPage page = internalDownloadRecord.getInternalDownloadPage();
            String[] split = page.getRawText().split("\"tel\":\"");
            // Guard against a malformed / error response missing the marker; the
            // original code threw ArrayIndexOutOfBoundsException here.
            if (split.length < 2 || split[1].length() < 4) {
                log.error("unexpected phone api response for url [{}]",
                        internalDownloadRecord.getHttpRequest().getUrl());
                continue;
            }
            String phone = split[1].substring(0, split[1].length() - 4);
            String photoId = internalDownloadRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("photoId");
            phoneByPhotoId.put(photoId, phone);
        }
    }

    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> listData = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        // Only article detail pages produce output data.
        if (url.matches(articleUrl)) {
            this.washArticle(crawlerRecord, page, listData);
        }
        return listData;
    }

    /**
     * Washes a dealer promotion article page into one CrawlerData record.
     * The dealer phone is looked up from the cache built by afterInternalDownload;
     * when the shop changes, the previous shop's cache entry is evicted.
     */
    private void washArticle(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> listData) {
        String photoId = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("photoId");
        String phone = phoneByPhotoId.get(photoId); // dealer phone number
        if (currentShopPhone == null) {
            currentShopPhone = phone;
            currentPhotoId = photoId;
        } else if (!currentShopPhone.equals(phone)) {
            // Moved on to a different shop: drop the previous shop's cache entry.
            currentShopPhone = phone;
            phoneByPhotoId.remove(currentPhotoId);
            currentPhotoId = photoId;
        }
        crawlerRecord.tagsCreator().bizTags().getCategoryTag().addKVTag("photoMap", null);
        String url = crawlerRecord.getHttpRequest().getUrl();
        String[] split = url.split("/");
        // Last path segment minus ".html" is the article id.
        String dataId = split[split.length - 1].replace(".html", "");
        // Remaining promotion time; fall back to the "s_time" span when absent.
        String promotionTime = page.getHtml().xpath("/html/body/div[4]/div[2]/div/div[2]/div[1]/strong/text()").get();
        if (promotionTime == null) {
            promotionTime = page.getHtml().xpath("//span[@class=\"s_time\"]").get();
        } else {
            promotionTime = "剩余" + promotionTime + "天";
        }
        String storeName = page.getHtml().xpath("/html/body/div[2]/div/div[2]/h1/text()").get(); // shop name
        String storeUrl = page.getHtml().xpath("/html/body/div[4]/div[1]/a[1]/@href").get();
        if (storeUrl == null) {
            // Layout change or partial download: without the shop link neither the
            // shop id nor the shop url can be built (original threw NPE here).
            log.error("shop link missing on article page [{}]", url);
            return;
        }
        if (!storeUrl.contains("index.html")) {
            storeUrl = storeUrl + "/index.html";
        }
        String storeId = storeUrl.split("/")[1];
        storeUrl = bitAutoUrl + storeUrl;
        String address = page.getHtml().xpath("/html/body/div[2]/div/div[2]/div[2]/text()").get(); // shop address
        long releaseTime = parseReleaseTime(
                page.getHtml().xpath("/html/body/div[4]/div[2]/div/div[1]/span/text()[2]").all());
        String title = page.getHtml().xpath("/html/body/div[4]/div[2]/div/h1/text()").get(); // article title
        List<String> text = page.getHtml().xpath("/html/body/div[4]/div[2]/div/div[2]/p[1]//text()").all();
        String brand = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("brand");
        String city = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("city");
        StringBuilder content = new StringBuilder();
        for (String s : text) {
            content.append(s);
        }

        CrawlerData crawlerData = CrawlerData.builder()
                .data(crawlerRecord, page)
                .url(crawlerRecord.getHttpRequest().getUrl())
                .releaseTime(releaseTime)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, dataId))
                .addContentKV(AutoVMCommonField.Field_Remaining_Time, promotionTime)
                .addContentKV(AutoVMCommonField.Field_Shop_Name, storeName)
                .addContentKV(AutoVMCommonField.Field_Shop_Phone, phone)
                .addContentKV(AutoVMCommonField.Field_Shop_Address, address)
                .addContentKV(AICCommonField.Field_Title, title)
                .addContentKV(AICCommonField.Field_Content, content.toString())
                .addContentKV(AutoVMCommonField.Field_Shop_id, storeId)       // shop id
                .addContentKV(AutoVMCommonField.Field_Shop_Url, storeUrl)     // shop url
                .addContentKV(AutoVMCommonField.Tag_City, city)               // city
                .addContentKV(AutoVMCommonField.Field_Brand_name, brand)      // brand
                .flowInPipelineTag("kafka")
                .build();
        crawlerData.setFilterPipelineResult(true);
        listData.add(crawlerData);
    }

    /**
     * Parses the publish date ("yyyy年MM月dd日") from the candidate text nodes
     * (last parsable candidate wins, matching the original loop); falls back to
     * the current time when no date can be parsed.
     */
    private long parseReleaseTime(List<String> candidates) {
        long releaseTime = 0L;
        for (String s : candidates) {
            if (s.contains("年") && s.contains("月") && s.contains("日")) {
                try {
                    releaseTime = DateUtils.parseDate(s, "yyyy年MM月dd日").getTime();
                } catch (ParseException e) {
                    // Log instead of printStackTrace; keep scanning other candidates.
                    log.warn("unparsable release date [{}]", s, e);
                }
            }
        }
        return releaseTime == 0L ? System.currentTimeMillis() : releaseTime;
    }

    /**
     * Re-queues a failed request, tracking the attempt number in the
     * RECORD_AGAIN_REQUEST biz tag. Fixes two bugs in the original version:
     * the tag was written with post-increment (count++), so the stored counter
     * never grew past its first value, and exceeding the limit only logged an
     * error while still re-queuing — producing unbounded retries.
     */
    private static void recordAgainRequest(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> parseList) {
        String url = crawlerRequestRecord.getHttpRequest().getUrl();
        int count = 1;
        if (crawlerRequestRecord.tagsCreator().bizTags().hasKVTag(RECORD_AGAIN_REQUEST)) {
            count = Integer.parseInt(
                    crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(RECORD_AGAIN_REQUEST)) + 1;
            if (count > MAX_RETRIES) {
                log.error("url excessive number of repeated downloads this url = " + url);
                return; // give up instead of retrying forever
            }
        }
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .releaseTime(System.currentTimeMillis())
                .httpUrl(url)
                // Unique record key per attempt so the retry is not deduplicated away.
                .recordKey(crawlerRequestRecord.getRecordKey() + count)
                .copyResultTags()
                .copyBizTags()
                .build();

        record.getHttpRequest().setCookies(crawlerRequestRecord.getHttpRequest().getCookies());
        record.getHttpRequest().setExtras(crawlerRequestRecord.getHttpRequest().getExtras());
        record.getHttpRequest().setHeaders(crawlerRequestRecord.getHttpRequest().getHeaders());
        record.setNeedParsedPage(crawlerRequestRecord.isNeedParsedPage());
        record.setNeedWashPage(crawlerRequestRecord.isNeedWashPage());
        record.tagsCreator().bizTags().addCustomKV(RECORD_AGAIN_REQUEST, count);
        parseList.add(record);
    }

    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        // Constant-first equals avoids an NPE when the record carries no site tag.
        return site.equals(crawlerRecord.tagsCreator().bizTags().site());
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // Intentionally empty: no per-record cleanup is required.
    }

    @Override
    public String domain() {
        return domain;
    }
}
