package com.chance.cc.crawler.development.scripts.dxy;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.selector.Selectable;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Field_Images;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.*;

/**
 * @author lt
 * @version 1.0
 * @date 2021-02-05 10:28:05
 * @email okprog@sina.com
 */
public class DXYCrawlerScript extends CrawlerCommonScript {

    private static final Logger logger = LoggerFactory.getLogger(DXYCrawlerScript.class);

    /** Category-tag value ("site") identifying the DXY news site. */
    private static final String SITE_NEWS = "news";
    /** Category-tag value ("site") identifying the DXY BBS/forum site. */
    private static final String SITE_FORUM = "forum";

    // Regexes used both to classify incoming record URLs and to register
    // crawlable URL shapes (see initUrlRegulars()).
    private static final String indexRegex = "https?://portal\\.dxy\\.cn/";
    private static final String keysRegex = "https?://\\S*v1/meta/dxy/keys\\S*";
    private static final String listUrlRegex = "https?://search\\.dxy\\.cn/\\?words=\\S*";
    private static final String newsUrlRegex = "https?://\\w*\\.dxy\\.cn/\\w*/\\d*\\S*";
    private static final String topicUrlRegex = "https?://www\\.dxy\\.cn/bbs/topic/\\d*\\S*";
    private static final String bbsUrlRegex = "https?://3g\\.dxy\\.cn/bbs/bbsapi/mobile\\?s=view_topic&\\S*";
    private static final String userInfoRegex = "https?://3g\\.dxy\\.cn/snsapi/username/\\S*";

    // String.format templates: search lists take (keyword, page); the topic page
    // takes a topic id; the BBS comment API takes (topic id, page); the mobile
    // user-info API takes a username.
    public static final String searchNewsUrlFormat = "https://search.dxy.cn/?words=%s&page=%s&source=CMS&limit=15&o=1";
    public static final String searchBBSUrlFormat = "https://search.dxy.cn/?words=%s&page=%s&source=BBS&limit=15&o=1";
    public static final String topicUrlFormat = "https://www.dxy.cn/bbs/newweb/pc/post/%s";
    public static final String bbsUrlFormat = "https://3g.dxy.cn/bbs/bbsapi/mobile?s=view_topic&checkUserAction=0&withGood=1&order=1&size=3&id=%s&page=%s";
    public static final String userInfoUrlFormat = "https://3g.dxy.cn/snsapi/username/%s";

    /**
     * Expands a keyword meta response (the first support record, matching
     * {@link #keysRegex}) into one first-page search-list request per keyword.
     * Falls back to the default behaviour when there are no support records or
     * when nothing could be expanded.
     *
     * @param requestRecord        the start record; its "site" category tag selects
     *                             the news vs. forum search URL template
     * @param supportSourceRecords auxiliary records; only the first is consumed
     * @return the generated list requests, or the superclass result as fallback
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> allItemRecords = new ArrayList<>();
        if (supportSourceRecords == null || supportSourceRecords.isEmpty()) {
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        CrawlerRequestRecord keywordRecord = supportSourceRecords.get(0);
        String keywordUrl = keywordRecord.getHttpRequest().getUrl();
        if (keywordUrl.matches(keysRegex)) {
            try {
                JSONObject jsonObject = JSONObject.parseObject(keywordRecord.getInternalDownloadPage().getRawText());

                // status == 0 marks a successful keyword-meta response.
                if (jsonObject.getIntValue("status") == 0) {
                    JSONArray objects = jsonObject.getJSONArray("content");
                    for (Object object : objects) {
                        String keyword = ((JSONObject) object).getString("keyword");
                        String site = requestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
                        String listUrl;
                        switch (site) {
                            case SITE_NEWS:
                                listUrl = String.format(searchNewsUrlFormat, encodeUtf8(keyword), 1);
                                break;
                            case SITE_FORUM:
                                listUrl = String.format(searchBBSUrlFormat, encodeUtf8(keyword), 1);
                                break;
                            default:
                                listUrl = "";
                                break;
                        }
                        if (StringUtils.isBlank(listUrl)) {
                            // "site" is the same for every keyword, so an unknown
                            // site aborts the whole expansion on the first keyword.
                            logger.error("unknown site with the start record {}", site);
                            return allItemRecords;
                        }
                        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                                .turnPageRequest(requestRecord)
                                .recordKey(listUrl)
                                .httpUrl(listUrl)
                                .releaseTime(System.currentTimeMillis())
                                .copyBizTags()
                                .copyScheduleTags()
                                .notFilterRecord()
                                .build();
                        crawlerRequestRecord.getHttpRequest().setMethod("GET");
                        crawlerRequestRecord.tagsCreator().bizTags().addKeywords(keyword);
                        allItemRecords.add(crawlerRequestRecord);
                    }
                }

            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            }
        }
        if (allItemRecords.isEmpty()) {
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        return allItemRecords;
    }

    /**
     * Routes a downloaded page to the matching link parser.
     * <p>
     * Failed downloads (blank body, download error, or a status other than
     * 200/404) are re-queued for reload; 404 pages are dropped. Search-list pages
     * go to {@link #parseListLinks}, BBS comment-API pages to {@link #parseBBSLinks}.
     *
     * @return follow-up requests, an empty list for dead 404 pages, or
     *         {@code null} when the URL matches no known shape (kept as-is:
     *         the framework's handling of null vs. empty is not visible here)
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        int statusCode = httpPage.getStatusCode();
        String lastRequestUrl = lastRequest.getUrl();
        if (StringUtils.isBlank(httpPage.getRawText()) || !httpPage.isDownloadSuccess() || (statusCode != 200 && statusCode != 404)) {
            // Reload: return the record itself, stripped of the Host header and
            // the filter request-type so it is scheduled again.
            parsedLinks.add(crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            crawlerRequestRecord.getHttpRequest().getHeaders().remove("Host");
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
            logger.error("{} --> this link has been reload status code is :{}", lastRequestUrl, statusCode);
            return parsedLinks;
        }
        if (statusCode == 404) {
            logger.error("{} --> this link is 404", lastRequestUrl);
            return parsedLinks;
        }
        if (lastRequestUrl.matches(listUrlRegex)) {
            return parseListLinks(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (lastRequestUrl.matches(bbsUrlRegex)) {
            return parseBBSLinks(crawlerRequestRecord, httpPage, parsedLinks);
        }
        return null;
    }

    /**
     * Parses one page of the BBS comment API (view_topic). For every floor it
     * emits a user-info request carrying the floor's data in the request extras:
     * floor 1 becomes an article (+interaction) record, other floors become
     * comment (+interaction) records. Emits a turn-page request while more
     * comment pages remain.
     */
    private List<CrawlerRequestRecord> parseBBSLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        Map<String, Object> extras = crawlerRequestRecord.getHttpRequest().getExtras();
        if (extras == null) {
            extras = new HashMap<>();
        }
        Map<String, Object> urlParams = getUrlParams(crawlerRequestRecord.getHttpRequest().getUrl());
        JSONObject pageObj = JSONObject.parseObject(httpPage.getRawText());
        String id = (String) urlParams.get("id");
        int curPage = Integer.parseInt(String.valueOf(urlParams.get("page")));
        extras.put("articleKey", id);
        // Detect DXY's soft 404: the API answers 200 with an "idxyer_error" code.
        String idxyerError = pageObj.getString("idxyer_error");
        if (null != idxyerError) {
            logger.error("this page is missing, code --> {}, url --> {}", idxyerError, crawlerRequestRecord.getHttpRequest().getUrl());
            crawlerRequestRecord.setNeedParsedPage(false);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }
        int totalPage = pageObj.getJSONObject("pageBean").getIntValue("total");
        JSONArray jsonArray = pageObj.getJSONArray("items");
        for (Object o : jsonArray) {
            JSONObject itemObj = (JSONObject) o;
            int floor = itemObj.getIntValue("floor");
            if (floor == 1) {
                // Floor 1 is the topic itself -> article record; the whole page
                // object rides along so washBBSPage can read the topic body.
                String username = itemObj.getString("username");
                String userUrl = String.format(userInfoUrlFormat, encodeUtf8(username));
                CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(userUrl)
                        .recordKey(userUrl)
                        .releaseTime(itemObj.getLongValue("postTime"))
                        .notFilterRecord()
                        .copyBizTags()
                        .resultLabelTag(article)
                        .resultLabelTag(interaction)
                        .needWashed(true)
                        .needParsed(false)
                        .build();
                Map<String, Object> newExtras = copyExtras(extras);
                newExtras.put("pageObj", pageObj);
                requestRecord.getHttpRequest().setExtras(newExtras);
                parsedLinks.add(requestRecord);
            } else {
                // Every other floor is a reply -> comment record; the comment id
                // is appended to the record key so replies by the same user on
                // the same topic stay distinct.
                String username = itemObj.getString("username");
                String cmtId = itemObj.getString("id");
                String userUrl = String.format(userInfoUrlFormat, encodeUtf8(username));
                CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(userUrl)
                        .recordKey(userUrl + "#" + cmtId)
                        .releaseTime(itemObj.getLongValue("postTime"))
                        .notFilterRecord()
                        .copyBizTags()
                        .resultLabelTag(comment)
                        .resultLabelTag(interaction)
                        .needWashed(true)
                        .needParsed(false)
                        .build();
                Map<String, Object> itemExtras = new HashMap<>();
                itemExtras.put("itemObj", itemObj);
                itemExtras.put("articleKey", id);
                requestRecord.getHttpRequest().setExtras(itemExtras);
                parsedLinks.add(requestRecord);
            }
        }
        if (curPage < totalPage) {
            String commentUrl = String.format(bbsUrlFormat, id, (curPage + 1));
            CrawlerRequestRecord cmtRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(commentUrl)
                    .recordKey(commentUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .notFilterRecord()
                    .build();
            cmtRecord.getHttpRequest().setExtras(copyExtras(extras));
            parsedLinks.add(cmtRecord);
        }
        return parsedLinks;
    }

    /**
     * Parses a search-result list page: emits the next-page request (the "words"
     * parameter taken from the URL is still percent-encoded, so it is reused
     * verbatim) and one item request per result. BBS results are redirected to
     * the comment API; news items are washed directly.
     */
    private List<CrawlerRequestRecord> parseListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        String lastRequestUrl = httpRequest.getUrl();
        Map<String, Object> urlParams = getUrlParams(lastRequestUrl);
        if (null != urlParams && urlParams.size() > 0) {
            String keyword = (String) urlParams.get("words");
            int curPage = Integer.parseInt((String) urlParams.get("page"));
            String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
            String nextPageUrl;
            switch (site) {
                case SITE_NEWS:
                    nextPageUrl = String.format(searchNewsUrlFormat, keyword, (curPage + 1));
                    break;
                case SITE_FORUM:
                    nextPageUrl = String.format(searchBBSUrlFormat, keyword, (curPage + 1));
                    break;
                default:
                    nextPageUrl = "";
                    break;
            }
            if (StringUtils.isNotBlank(nextPageUrl)) {
                CrawlerRequestRecord nextPageRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(nextPageUrl)
                        .recordKey(nextPageUrl)
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .build();
                parsedLinks.add(nextPageRecord);
            }
        }

        // Parse the result list itself.
        List<Selectable> itemNodes = httpPage.getHtml().xpath("//div[@class=\"main-list\"]/div").nodes();
        for (Selectable itemNode : itemNodes) {
            String itemUrl = itemNode.xpath("./h3/a/@href").get();
            List<String> allTimes = itemNode.xpath(".//p[@class=\"it-author\"]/text()").all();
            String pubTime = "";
            for (String allTime : allTimes) {
                // The author line mixes several fragments; keep the one that
                // looks like "yyyy-MM-dd HH:mm:ss".
                allTime = allTime.trim();
                if (allTime.matches("\\d{4}-\\d{2}-\\d{2}\\s*\\d{2}:\\d{2}:\\d{2}")) {
                    pubTime = allTime;
                }
            }
            try {
                if (itemUrl.contains("/bbs/")) {
                    // BBS hit: extract the topic id and fetch page 1 of the
                    // comment API instead of the HTML topic page.
                    String topicKey = itemUrl.substring(itemUrl.lastIndexOf("/") + 1).split("\\?")[0];
                    itemUrl = String.format(bbsUrlFormat, topicKey, 1);
                }
                CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(itemUrl)
                        .recordKey(itemUrl)
                        .releaseTime(DateUtils.parseDate(pubTime, "yyyy-MM-dd HH:mm:ss").getTime())
                        .copyBizTags()
                        .needWashed(false)
                        .build();
                if (crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site").equals(SITE_NEWS)) {
                    // News pages need no further link parsing, only washing.
                    itemRecord.setNeedParsedPage(false);
                    itemRecord.setNeedWashPage(true);
                    itemRecord.tagsCreator().resultTags().addResultDataType(article);
                    itemRecord.tagsCreator().resultTags().addResultDataType(interaction);
                }
                Map<String, Object> extras = new HashMap<>();
                extras.put("listUrl", lastRequestUrl);
                itemRecord.getHttpRequest().setExtras(extras);
                parsedLinks.add(itemRecord);
            } catch (ParseException e) {
                logger.error("parse date error with pubTime ---> {}", pubTime);
            }
        }
        return parsedLinks;
    }

    /**
     * Dispatches page washing by the "site" category tag: news pages to
     * {@link #washNewsPage}, forum pages to {@link #washBBSPage}.
     *
     * @return the extracted data, or {@code null} for a blank page body (kept
     *         as-is: the framework's handling of null is not visible here)
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        if (StringUtils.isBlank(httpPage.getRawText())) {
            return null;
        }
        if (crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site").equals(SITE_NEWS)) {
            return washNewsPage(crawlerRequestRecord, httpPage, crawlerDataList);
        }
        if (crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site").equals(SITE_FORUM)) {
            return washBBSPage(crawlerRequestRecord, httpPage, crawlerDataList);
        }
        return crawlerDataList;
    }

    /**
     * Washes a BBS user-info response. The downloaded page supplies the author
     * statistics ("items" object); the actual topic/comment payload was stashed
     * in the request extras by {@link #parseBBSLinks} ("pageObj" for the article
     * branch, "itemObj" for the comment branch). Interaction records are emitted
     * alongside each article/comment when the interaction result tag is present.
     */
    private List<CrawlerData> washBBSPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerData> crawlerDataList) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        try {
            String articleKey = (String) extras.get("articleKey");
            String articleUrl = String.format(topicUrlFormat, articleKey);
            String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
            JSONObject userObj = JSONObject.parseObject(httpPage.getRawText()).getJSONObject("items");

            if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(article)) {
                JSONObject pageObj = (JSONObject) extras.get("pageObj");

                // NOTE(review): parseBBSLinks does not copy "listUrl" into the
                // article extras, so this may be null here — verify upstream.
                String listUrl = (String) extras.get("listUrl");
                JSONObject topicObj = pageObj.getJSONArray("items").getJSONObject(0);
                String title = topicObj.getString("subject");
                String body = topicObj.getString("body");
                Html html = new Html(body);
                StringBuilder sbContent = new StringBuilder();
                List<String> allContents = html.xpath("//p/text()").all();
                for (String allContent : allContents) {
                    sbContent.append(allContent);
                }
                // Image URLs are joined with a literal "\x01" separator.
                StringBuilder sbImage = new StringBuilder();
                List<String> allImages = html.xpath("//img/@src").all();
                for (String allImage : allImages) {
                    sbImage.append(allImage).append("\\x01");
                }
                String author = topicObj.getString("nickname");
                String authorId = topicObj.getString("userId");
                String floor = topicObj.getString("floor");

                String follows = userObj.getString("followerCount");
                int totalCount = userObj.getIntValue("postCount");
                int replyCount = userObj.getIntValue("feedCount");
                // topicCount = posts minus replies, clamped to a non-negative
                // textual value by stripping a leading minus sign.
                String topicCount = String.valueOf(totalCount - replyCount);
                if (topicCount.contains("-")) {
                    topicCount = topicCount.replace("-", "");
                }
                String pickCount = userObj.getString("postPicked");
                CrawlerData crawlerData = CrawlerData.builder()
                        .data(crawlerRequestRecord, httpPage)
                        .url(articleUrl)
                        .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), articleKey))
                        .releaseTime(crawlerRequestRecord.getReleaseTime())
                        .addContentKV(Field_Title, title)
                        .addContentKV(Field_Content, sbContent.toString())
                        .addContentKV(Field_Author, author)
                        .addContentKV(Field_Author_Id, authorId)
                        .addContentKV(Field_Author_Follows, follows)
                        .addContentKV(Field_Author_Topic_Count, topicCount)
                        .addContentKV(Field_Author_Reply_Count, String.valueOf(replyCount))
                        .addContentKV(Field_Author_Pick_Count, pickCount)
                        .addContentKV(Field_Urls, listUrl)
                        .addContentKV(Field_Floor, floor)
                        .addContentKV(Field_Images, sbImage.toString())
                        .resultLabelTag(article)
                        .build();
                crawlerDataList.add(crawlerData);
                if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)) {
                    String views = topicObj.getString("reads");
                    String comments = pageObj.getJSONObject("pageBean").getString("count");
                    String likes = topicObj.getString("votes");
                    String collections = topicObj.getString("favs");
                    CrawlerData crawlerCmtData = CrawlerData.builder()
                            .data(crawlerRequestRecord, httpPage)
                            .url(articleUrl)
                            .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, interaction.enumVal(), articleKey))
                            .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), articleKey))
                            .releaseTime(System.currentTimeMillis())
                            .addContentKV(Field_I_Likes, likes)
                            .addContentKV(Field_I_Views, views)
                            .addContentKV(Field_I_Comments, comments)
                            .addContentKV(Field_I_Collection, collections)
                            .resultLabelTag(interaction)
                            .build();
                    crawlerDataList.add(crawlerCmtData);
                }
            }
            if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(comment)) {
                JSONObject itemObj = (JSONObject) extras.get("itemObj");
                String itemId = itemObj.getString("id");
                String author = itemObj.getString("nickname");
                String authorId = itemObj.getString("userId");
                String floor = itemObj.getString("floor");
                String body = itemObj.getString("body");
                Html html = new Html(body);
                StringBuilder sbContent = new StringBuilder();
                List<String> allContents = html.xpath("//p/text()").all();
                for (String allContent : allContents) {
                    sbContent.append(allContent);
                }
                // Plain-text comment bodies have no <p> tags; keep the raw body.
                if (allContents.isEmpty()) {
                    sbContent.append(body);
                }
                StringBuilder sbImage = new StringBuilder();
                List<String> allImages = html.xpath("//img/@src").all();
                for (String allImage : allImages) {
                    sbImage.append(allImage).append("\\x01");
                }
                String follows = userObj.getString("followerCount");
                int totalCount = userObj.getIntValue("postCount");
                int replyCount = userObj.getIntValue("feedCount");
                String topicCount = String.valueOf(totalCount - replyCount);
                if (topicCount.contains("-")) {
                    topicCount = topicCount.replace("-", "");
                }
                String pickCount = userObj.getString("postPicked");

                // BUG FIX: the original chained .dataId() twice, so the article
                // id silently overwrote the comment id. The second call must be
                // .parentId(), mirroring the interaction records below.
                CrawlerData crawlerData = CrawlerData.builder()
                        .data(crawlerRequestRecord, httpPage)
                        .url(articleUrl)
                        .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), itemId))
                        .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), articleKey))
                        .releaseTime(crawlerRequestRecord.getReleaseTime())
                        .addContentKV(Field_Content, sbContent.toString())
                        .addContentKV(Field_Author, author)
                        .addContentKV(Field_Author_Id, authorId)
                        .addContentKV(Field_Author_Follows, follows)
                        .addContentKV(Field_Author_Topic_Count, topicCount)
                        .addContentKV(Field_Author_Reply_Count, String.valueOf(replyCount))
                        .addContentKV(Field_Author_Pick_Count, pickCount)
                        .addContentKV(Field_Floor, floor)
                        .addContentKV(Field_Images, sbImage.toString())
                        .resultLabelTag(comment)
                        .build();
                crawlerDataList.add(crawlerData);

                if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)) {
                    String views = itemObj.getString("reads");
                    String likes = itemObj.getString("votes");
                    String collections = itemObj.getString("favs");
                    CrawlerData crawlerCmtData = CrawlerData.builder()
                            .data(crawlerRequestRecord, httpPage)
                            .url(articleUrl)
                            .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, interaction.enumVal(), itemId))
                            .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), itemId))
                            .releaseTime(System.currentTimeMillis())
                            .addContentKV(Field_I_Likes, likes)
                            .addContentKV(Field_I_Views, views)
                            .addContentKV(Field_I_Collection, collections)
                            .resultLabelTag(interaction)
                            .build();
                    crawlerDataList.add(crawlerCmtData);
                }
            }
        } catch (Exception e) {
            // Include the exception so the stack trace is not lost.
            logger.error("JSON parse page error,download code --> {}, page download success --> {}", httpPage.getStatusCode(), httpPage.isDownloadSuccess(), e);
        }
        return crawlerDataList;
    }

    /**
     * Washes a news article page. Extracts title/author/source/content/images
     * from several alternative XPath layouts (the news section uses more than
     * one template) and emits an article record plus, when tagged, an
     * interaction record carrying the like count.
     */
    private List<CrawlerData> washNewsPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerData> crawlerDataList) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        String requestUrl = httpRequest.getUrl();
        Map<String, Object> extras = httpRequest.getExtras();
        Html html = httpPage.getHtml();
        // The article key is the last URL path segment, query string stripped.
        String articleKey = requestUrl.substring(requestUrl.lastIndexOf("/") + 1).split("\\?")[0];
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");

        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(article)) {
            String title = html.xpath("//h1[@class=\"article-title\"]/text()|//h1[@class=\"title\"]/text()|//h1/text()").get();
            String author = html.xpath("//h3[@class=\"name\"]/a/text()|//span[@class=\"it\"][3]/text()").get();
            String source = html.xpath("//span[@class=\"source\"]/text()|//span[@class=\"it\"]/a/text()|//span[@class=\"it\"][2]/text()|//span[@class=\"mr10\"][2]/a/text()").get();
            String authorId = html.xpath("//h3[@class=\"name\"]/a/@href").get();
            // Strip the "作者" ("author") / "来源" ("source") label prefixes.
            if (StringUtils.isNotBlank(author) && author.contains("作者")) {
                author = author.substring(3);
            }
            if (StringUtils.isNotBlank(source) && source.contains("来源")) {
                source = source.substring(3);
            }

            if (StringUtils.isNotBlank(authorId)) {
                authorId = authorId.substring(authorId.lastIndexOf("/") + 1).split("\\?")[0];
            } else {
                authorId = "";
            }
            StringBuilder sbContent = new StringBuilder();
            List<String> allContents = html.xpath("//div[@class=\"dxy-article\"]/p//text()|//div[@class=\"article article__detail\"]/p//text()").all();
            for (String allContent : allContents) {
                sbContent.append(allContent);
            }
            StringBuilder sbImage = new StringBuilder();
            List<String> allImages = html.xpath("//div[@class=\"dxy-article\"]/p//img/@data-bjh-origin-src|//div[@class=\"article article__detail\"]/p//img/@src").all();
            for (String allImage : allImages) {
                // BUG FIX: was "\\0x1", inconsistent with the "\x01" separator
                // used for every other image list in this script.
                sbImage.append(allImage).append("\\x01");
            }
            String listUrl = extras == null ? null : (String) extras.get("listUrl");
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .url(requestUrl)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), articleKey))
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .addContentKV(Field_Title, title)
                    .addContentKV(Field_Content, sbContent.toString())
                    .addContentKV(Field_Author, author)
                    .addContentKV(Field_Source, source)
                    .addContentKV(Field_Author_Id, authorId)
                    .addContentKV(Field_Urls, listUrl)
                    .addContentKV(Field_Images, sbImage.toString())
                    .resultLabelTag(article)
                    .build();
            List<String> allTags = html.xpath("//div[@class=\"tag-list\"]/a/text()").all();
            crawlerData.tagsCreator().bizTags().addCustomKV(Tag_Field_Topic_Type, allTags);
            crawlerDataList.add(crawlerData);
        }
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)) {
            String likes = html.xpath("//span[@class=\"count\"]/text()").get();
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .url(httpRequest.getUrl())
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, interaction.enumVal(), articleKey))
                    .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), articleKey))
                    .releaseTime(System.currentTimeMillis())
                    .addContentKV(Field_I_Likes, likes)
                    .resultLabelTag(interaction)
                    .build();
            crawlerDataList.add(crawlerData);
        }
        return crawlerDataList;
    }

    /** Registers every URL shape this script is allowed to crawl. */
    @Override
    public void initUrlRegulars() {
        addUrlRegular(indexRegex);
        addUrlRegular(keysRegex);
        addUrlRegular(listUrlRegex);
        addUrlRegular(newsUrlRegex);
        addUrlRegular(topicUrlRegex);
        addUrlRegular(bbsUrlRegex);
        addUrlRegular(userInfoRegex);
    }

    /** Accepts every record; no additional crawl-time checks are needed. */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        return true;
    }

    /** No post-execution cleanup required for this site. */
    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    /** @return the domain key this script is registered under */
    @Override
    public String domain() {
        return "dxy";
    }

    /**
     * Splits a URL's query string into a key/value map.
     * Values are NOT URL-decoded; parameters without exactly one '=' are skipped.
     *
     * @param url e.g. {@code http://x.y.com?aa=11&bb=22&cc=33}
     * @return the parameter map, or {@code null} when the URL has no usable
     *         query string (callers rely on this null contract)
     */
    public static Map<String, Object> getUrlParams(String url) {
        Map<String, Object> map = new HashMap<String, Object>(0);
        String param = null;
        if (url.contains("?")) {
            param = url.split("\\?")[1];
        }
        if (StringUtils.isBlank(param)) {
            return null;
        }
        String[] params = param.split("&");
        for (String s : params) {
            String[] p = s.split("=");
            if (p.length == 2) {
                map.put(p[0], p[1]);
            }
        }
        return map;
    }

    /**
     * Shallow-copies a request-extras map so records can mutate their extras
     * independently. Preserves the original null-in/null-out behaviour.
     */
    public static Map<String, Object> copyExtras(Map<String, Object> inExtras) {
        if (inExtras == null) {
            return null;
        }
        return new HashMap<>(inExtras);
    }

    /**
     * URL-encodes a value as UTF-8. Replaces the deprecated one-argument
     * {@link URLEncoder#encode(String)}, whose output depends on the platform
     * default charset.
     */
    private static String encodeUtf8(String value) {
        try {
            return URLEncoder.encode(value, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            // UTF-8 support is mandated by the Java platform; unreachable.
            throw new IllegalStateException("UTF-8 not supported", e);
        }
    }
}
