package com.chance.cc.crawler.development.scripts.baidu;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.CategoryTag;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.util.UriEncoder;

import java.io.FileInputStream;
import java.io.IOException;
import java.net.URLEncoder;
import java.util.*;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.Tag_Field_PostClient;

/**
 * @author lt
 * @version 1.0
 * @date 2021-03-26 13:54:43
 * @email okprog@sina.com
 */
public class BaiDuTieBaCrawlerScript extends CrawlerCommonScript {
    private static Logger logger = LoggerFactory.getLogger(BaiDuTieBaCrawlerScript.class);


    // Crawler source identifiers.
    public static final String domain = "baidu";
    private static final String scriptSite = "tieba";

    // URL-recognition patterns used by parseLinks() to route downloaded pages.
    public static final String indexRegex = "https://tieba\\.baidu\\.com/index\\.html";
    public static final String keysRegex = "https?://\\S*v1/meta/"+domain+"/keys\\S*";
    public static final String searchTopicListUrlRegex = "https://tieba\\.baidu\\.com/f/search/res\\S*";
    public static final String topicUrlRegex = "https?://tieba\\.baidu\\.com/p/\\d*\\?pn=\\d*";
    // NOTE(review): "https:?//" makes the COLON optional, not the "s" — this was
    // probably meant to be "https?://". It still matches the https:// URLs this
    // script itself generates, so behavior is unaffected.
    public static final String commentUrlRegex = "https:?//tieba\\.baidu\\.com/p/comment\\S*";
    public static final String searchPostBarUrlRegex = "https?://tieba\\.baidu\\.com/f\\?kw=\\S*&ie=utf-8&pn=\\d*";

    // only_thread=1: order results by post publish time; only_thread=0: order by latest reply time.
    public static final String searchTopicListUrlFormat = "https://tieba.baidu.com/f/search/res?isnew=1&qw=%s&rn=10&only_thread=%s&sm=1&pn=%s";
    public static final String topicUrlFormat = "https://tieba.baidu.com/p/%s?pn=%s";
    public static final String commentUrlFormat = "https://tieba.baidu.com/p/comment?tid=%s&pid=%s&pn=%s&t=%s";
    public static final String searchPostBarUrlFormat = "https://tieba.baidu.com/f?kw=%s&ie=utf-8&pn=%s";

    // Job-configuration tag keys that select which search modes run.
    public static final String SEARCH_POST_BAR = "search_post_bar";
    public static final String SEARCH_POST_TOPIC = "search_post_topic";
    public static final String SEARCH_POST_TOPIC_MODEL = "search_post_topic_model";
    public static final String MODEL_REPLY = "model_reply";
    // NOTE(review): mixed-case name violates UPPER_SNAKE_CASE, but this is a public
    // constant — renaming could break external references, so it is left as-is.
    public static final String MODEL_publish = "model_publish";

    // HTTP-config site key plus a lazily-loaded round-robin User-Agent pool (see getUA()).
    public static final String httpConfigSite = "tieba";
    private static LinkedBlockingQueue<String> userAgents = new LinkedBlockingQueue<>();
    public static Object uaObject = new Object();

    /**
     * Expands the first keyword-list support record into concrete search
     * requests: topic-search list pages (ordered by reply or publish time,
     * per the job's crawler model tag) and/or post-bar listing pages.
     *
     * @param requestRecord        the incoming schedule record; gets a UA header and the tieba http-site config
     * @param supportSourceRecords support records; element 0 is expected to be a keys-API response
     * @return one request record per keyword/mode, or the superclass result when nothing could be built
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> allItemRecords = new ArrayList<>();

        requestRecord.getHttpRequest().addHeader("User-Agent", getUA());
        requestRecord.getHttpConfig().setHttpSite(httpConfigSite);

        // No keyword source attached: fall back to default handling.
        if (supportSourceRecords == null || supportSourceRecords.size() < 1) {
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        CrawlerRequestRecord keywordRecord = supportSourceRecords.get(0);
        String keywordUrl = keywordRecord.getHttpRequest().getUrl();
        if (keywordUrl.matches(keysRegex)) {
            try {
                JSONObject jsonObject = JSONObject.parseObject(keywordRecord.getInternalDownloadPage().getRawText());
                // status == 0 means the keys API call succeeded — TODO confirm against the API contract.
                if (jsonObject.getIntValue("status") == 0) {
                    JSONArray objects = jsonObject.getJSONArray("content");
                    CategoryTag categoryTag = requestRecord.tagsCreator().scheduleTags().getCategoryTag();
                    for (Object object : objects) {
                        String keyword = ((JSONObject) object).getString("keyword");
                        if (categoryTag.isContainKVTag(SEARCH_POST_TOPIC)) {
                            // Topic search: pick the list ordering by crawler model.
                            String crawlerModel = categoryTag.getKVTagStrVal(SEARCH_POST_TOPIC_MODEL);
                            if (StringUtils.isBlank(crawlerModel)) {
                                // Guard: switching on a null String throws NPE before
                                // ever reaching the default branch.
                                logger.error("search post topic missing crawler model please check your crawler job");
                                return super.prepareRequest(requestRecord, supportSourceRecords);
                            }
                            CrawlerRequestRecord crawlerRequestRecord;
                            switch (crawlerModel) {
                                case MODEL_REPLY:
                                    // only_thread=0: results ordered by latest reply time.
                                    String replyTimeListUrl = String.format(searchTopicListUrlFormat, UriEncoder.encode(keyword), 0, 1);
                                    crawlerRequestRecord = getTimeListRecord(requestRecord, replyTimeListUrl);
                                    break;
                                case MODEL_publish:
                                    // only_thread=1: results ordered by post publish time.
                                    String publishTimeListUrl = String.format(searchTopicListUrlFormat, UriEncoder.encode(keyword), 1, 1);
                                    crawlerRequestRecord = getTimeListRecord(requestRecord, publishTimeListUrl);
                                    break;
                                default:
                                    logger.error("search post topic missing crawler model please check your crawler job");
                                    return super.prepareRequest(requestRecord, supportSourceRecords);
                            }
                            crawlerRequestRecord.tagsCreator().bizTags().addKeywords(keyword);
                            allItemRecords.add(crawlerRequestRecord);
                        }
                        if (categoryTag.isContainKVTag(SEARCH_POST_BAR)) {
                            // Post-bar search. Encode with an explicit UTF-8 charset
                            // instead of the deprecated platform-default overload.
                            String listUrl = String.format(searchPostBarUrlFormat, URLEncoder.encode(keyword, "UTF-8"), 0);
                            CrawlerRequestRecord crawlerRequestRecord = getTimeListRecord(requestRecord, listUrl);
                            crawlerRequestRecord.tagsCreator().bizTags().addKeywords(keyword);
                            allItemRecords.add(crawlerRequestRecord);
                        }
                    }
                }
            } catch (Exception e) {
                logger.error("from keywords init urls failed");
                logger.error(e.getMessage(), e);
            }
        }
        if (allItemRecords.isEmpty()) {
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        return allItemRecords;
    }

    /**
     * Returns a User-Agent string from a round-robin pool, lazily loaded from a
     * file on first use (double-checked under {@code uaObject}).
     *
     * @return the next UA in rotation, or {@code null} when the pool could not be loaded
     */
    private String getUA() {
        if (userAgents.size() < 1) {
            synchronized (uaObject) {
                // Double-checked so only one thread loads the file.
                if (userAgents.size() < 1) {
                    // try-with-resources: the original leaked the FileInputStream.
                    try (FileInputStream in = new FileInputStream("/data/chance_crawler_runner/data/ua/ua_baidu_tieba.txt")) {
                        userAgents.addAll(IOUtils.readLines(in, "utf-8"));
                    } catch (IOException e) {
                        // Keep the stack trace; the original logged only the message.
                        logger.error("failed to load UA pool: " + e.getMessage(), e);
                    }
                }
            }
        }

        String ua = userAgents.poll();
        if (ua == null) {
            // Pool is still empty (load failed). The original then called
            // userAgents.add(null), which throws NullPointerException.
            logger.warn("UA pool is empty, returning null User-Agent");
            return null;
        }
        logger.info("poll new ua: " + ua);
        // Re-enqueue so the pool rotates round-robin.
        userAgents.add(ua);
        return ua;
    }

    /**
     * Builds an unfiltered turn-page request for the given list URL, stamped
     * with the current time and carrying a standard browser-like header set.
     *
     * @param requestRecord the parent schedule record whose biz tags are copied
     * @param listUrl       the list-page URL to request (also used as record key)
     * @return the fully-headed list request record
     */
    private CrawlerRequestRecord getTimeListRecord(CrawlerRequestRecord requestRecord, String listUrl) {
        CrawlerRequestRecord listRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(requestRecord)
                .httpUrl(listUrl)
                .recordKey(listUrl)
                .releaseTime(System.currentTimeMillis())
                .notFilterRecord()
                .copyBizTags()
                .build();
        HttpRequest request = listRecord.getHttpRequest();
        request.addHeader("User-Agent", getRandomUA());
        // Fixed browser-like headers, added in a stable order.
        String[][] staticHeaders = {
                {"Host", "tieba.baidu.com"},
                {"Upgrade-Insecure-Requests", "1"},
                {"Connection", "keep-alive"},
                {"Cache-Control", "max-age=0"},
                {"Accept-Language", "zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7"},
                {"Accept-Encoding", "gzip, deflate, br"},
                {"Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9"}
        };
        for (String[] header : staticHeaders) {
            request.addHeader(header[0], header[1]);
        }
        return listRecord;
    }

    /**
     * Routes a downloaded page to the matching link parser based on its URL,
     * after handling 404s and pages that fail the page-validity check.
     *
     * @param crawlerRequestRecord the record whose page was just downloaded
     * @param httpPage             the downloaded page
     * @return follow-up requests to schedule; empty when nothing follows
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        if (404 == httpPage.getStatusCode()){
            logger.info("status code is 404");
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }
        // NOTE(review): doHttpPageCheck(...) == true appears to mean the page is
        // invalid (e.g. anti-bot block) and should be retried — confirm semantics.
        if (doHttpPageCheck(crawlerRequestRecord,httpPage)){
            if(httpPage.getStatusCode() == 403){
                logger.error("status is [{}],httpPage detail is [{}]",httpPage.getStatusCode(),JSONObject.toJSONString(httpPage));
            }
            // Track retries in a "downloadTimes" tag; give up after 10 attempts.
            CrawlerBusinessTags crawlerBusinessTags = crawlerRequestRecord.tagsCreator().bizTags();
            String downloadTimes = crawlerBusinessTags.getCategoryTag().getKVTagStrVal("downloadTimes");
            if (StringUtils.isBlank(downloadTimes)){
                crawlerBusinessTags.addCustomKV("downloadTimes",1);
            }else {
                int times = Integer.parseInt(downloadTimes);
                crawlerBusinessTags.addCustomKV("downloadTimes",times + 1);
                if (times > 10){
                    logger.error("link download too many times");
                    return parsedLinks;
                }
            }
            // Fresh random BAIDUID cookie for the retry.
            String uuid = UUID.randomUUID().toString().replaceAll("-", "");
            lastRequest.addHeader("Cookie","wise_device=0;BAIDUID="+uuid+":FG=1");
            crawlerRequestRecord.setNeedWashPage(false);
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
            // Re-schedule the same record as the retry.
            parsedLinks.add(crawlerRequestRecord);
            return parsedLinks;
        }
        // Page passed validation: reset the retry counter.
        crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKvTags().remove("downloadTimes");

        String lastRequestUrl = lastRequest.getUrl();
        if (lastRequestUrl.matches(searchTopicListUrlRegex)){
            return parseSearchTopicListLinks(crawlerRequestRecord,httpPage,parsedLinks);
        }
        if (lastRequestUrl.matches(searchPostBarUrlRegex)){
            return parsePostBarLinks(crawlerRequestRecord,httpPage,parsedLinks);
        }
        if (lastRequestUrl.matches(topicUrlRegex)){
            return parseTopicLinks(crawlerRequestRecord,httpPage,parsedLinks);
        }
        if (lastRequestUrl.matches(commentUrlRegex)){
            return parseCommentLinks(crawlerRequestRecord,httpPage,parsedLinks);
        }
        // Unrecognized URL: return the empty list instead of null so callers
        // never need a null check (the original returned null here).
        return parsedLinks;
    }

    /**
     * Parses a post-bar (forum) listing page: when the page holds a full batch
     * of threads it schedules the next listing page, and it creates one item
     * request per thread found.
     *
     * @param crawlerRequestRecord record for the listing page just downloaded
     * @param httpPage             the downloaded listing page
     * @param parsedLinks          accumulator the new requests are added to
     * @return parsedLinks, with next-page and per-thread requests appended
     */
    private List<CrawlerRequestRecord> parsePostBarLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        String httpRequestUrl = httpRequest.getUrl();
        Map<String, Object> urlParams = getUrlParams(httpRequestUrl);
        Html html = httpPage.getHtml();
        // One node per thread entry in the bar's thread list.
        List<Selectable> itemNodes = html.xpath("//ul[@id=\"thread_list\"]/li[contains(@class,\"j_thread_list\")]").nodes();
        if (null != urlParams && null != itemNodes && itemNodes.size() > 0){
            String keyword = (String)urlParams.get("kw");
            String pageNumStr = (String)urlParams.get("pn");
            int pageNum = Integer.parseInt(pageNumStr);
            // >= 20 threads means a full page; schedule the next one.
            // NOTE(review): "pn" steps by 50 here — presumably an item offset, not
            // a page index; confirm against the site's paging scheme.
            if (itemNodes.size() >= 20){
                pageNum += 50;
                String nextPageUrl = String.format(searchPostBarUrlFormat,keyword,pageNum);
                CrawlerRequestRecord nextPageRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(nextPageUrl)
                        .recordKey(nextPageUrl)
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .build();
                HttpRequest nextPageRequest = nextPageRecord.getHttpRequest();
                nextPageRequest.addHeader("User-Agent",getRandomUA());
                nextPageRequest.addHeader("Host","tieba.baidu.com");
//                nextPageRequest.addHeader("Upgrade-Insecure-Requests","1");
//                nextPageRequest.addHeader("Connection","keep-alive");
//                nextPageRequest.addHeader("Cache-Control","max-age=0");
//                nextPageRequest.addHeader("Accept-Language","zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7");
//                nextPageRequest.addHeader("Accept-Encoding","gzip, deflate, br");
//                nextPageRequest.addHeader("Accept","text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9");
                // Fresh random BAIDUID cookie per request.
                String uuid = UUID.randomUUID().toString().replaceAll("-", "");
                nextPageRequest.addHeader("Cookie","wise_device=0;BAIDUID="+uuid+":FG=1");
//                nextPageRequest.addHeader("Cookie","BAIDUID=8CAB740F07668E923E5D087363ED2BB1:FG=1");
                parsedLinks.add(nextPageRecord);
            }
            // One item (topic first page) request per thread on this listing.
            for (Selectable itemNode : itemNodes) {
                String topicId = itemNode.xpath("./@data-tid").get();
                String topicUrl = String.format(topicUrlFormat,topicId,1);
                // Reply-time text may be split across nodes; join the pieces.
                List<String> strings = itemNode.xpath("./div/div/div/div/span[contains(@class,\"j_reply_data\")]//text()").all();
                StringBuilder sbTime = new StringBuilder();
                for (String string : strings) {
                    sbTime.append(unescapeHtml2J(string).trim());
                }
                String time = sbTime.toString();
                // "HH:mm" or "M-d" list times; 0 when unparseable.
                long releaseTime = parseListTime(time);
                CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(topicUrl)
                        .recordKey(topicUrl)
                        .releaseTime(releaseTime)
                        .copyBizTags()
                        .resultLabelTag(article)
                        .resultLabelTag(interaction)
                        .needParsed(true)
                        .needWashed(false)
                        .build();
                HttpRequest itemRequest = itemRecord.getHttpRequest();
                itemRequest.addHeader("User-Agent",getRandomUA());
                itemRequest.addHeader("Host","tieba.baidu.com");
//                itemRequest.addHeader("Upgrade-Insecure-Requests","1");
//                itemRequest.addHeader("Connection","keep-alive");
//                itemRequest.addHeader("Cache-Control","max-age=0");
//                itemRequest.addHeader("Accept-Language","zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7");
//                itemRequest.addHeader("Accept-Encoding","gzip, deflate, br");
//                itemRequest.addHeader("Accept","text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9");
//                itemRequest.addHeader("Referer","https://wappass.baidu.com/");
                // Fresh random BAIDUID cookie per request.
                String uuid = UUID.randomUUID().toString().replaceAll("-", "");
                itemRequest.addHeader("Cookie","wise_device=0;BAIDUID="+uuid+":FG=1");
//                itemRequest.addHeader("Cookie","BAIDUID=8CAB740F07668E923E5D087363ED2BB1:FG=1");
                parsedLinks.add(itemRecord);
            }
        }
        return parsedLinks;
    }

    /**
     * Converts a Tieba list-page relative time string into epoch millis.
     * "HH:mm" is interpreted as today at that time; "M-d" as that date of the
     * current year at midnight. Anything else yields 0.
     *
     * Fixes vs. original: digit patterns use {@code \d+} (the original
     * {@code \d*} accepted empty runs, so inputs like ":" crashed
     * Integer.parseInt), and MILLISECOND is zeroed so results are
     * deterministic. Package-private (was private) for testability.
     *
     * @param time the raw list-time text
     * @return epoch millis, or 0 when the text matches neither pattern
     */
    static long parseListTime(String time) {
        Calendar calendar = Calendar.getInstance();
        if (time.matches("\\d+:\\d+")) {
            // "HH:mm" — today at that hour/minute.
            String[] split = time.split(":");
            calendar.set(Calendar.HOUR_OF_DAY, Integer.parseInt(split[0]));
            calendar.set(Calendar.MINUTE, Integer.parseInt(split[1]));
            calendar.set(Calendar.SECOND, 0);
            calendar.set(Calendar.MILLISECOND, 0);
            return calendar.getTimeInMillis();
        }
        if (time.matches("\\d+-\\d+")) {
            // "M-d" — that month/day in the current year, at midnight.
            String[] split = time.split("-");
            calendar.set(Calendar.MONTH, Integer.parseInt(split[0]) - 1);
            calendar.set(Calendar.DAY_OF_MONTH, Integer.parseInt(split[1]));
            calendar.set(Calendar.HOUR_OF_DAY, 0);
            calendar.set(Calendar.MINUTE, 0);
            calendar.set(Calendar.SECOND, 0);
            calendar.set(Calendar.MILLISECOND, 0);
            return calendar.getTimeInMillis();
        }
        return 0;
    }


    /**
     * Parses a comment (floor-reply) page: marks the page washable and tagged
     * as comment data when it contains any entries, and schedules the next
     * comment page while more than one entry is present.
     *
     * @param crawlerRequestRecord record for the comment page just downloaded
     * @param httpPage             the downloaded comment fragment
     * @param parsedLinks          accumulator the next-page request is added to
     * @return parsedLinks, with at most one next-page request appended
     */
    private List<CrawlerRequestRecord> parseCommentLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Html html = httpPage.getHtml();
        // Each comment renders as an <li> in the fragment.
        List<Selectable> commentNodes = html.xpath("//li").nodes();
        if (commentNodes.size() >= 1){
            crawlerRequestRecord.setNeedWashPage(true);
            crawlerRequestRecord.tagsCreator().resultTags().addResultDataType(comment);
        }
        if (commentNodes.size() > 1){
            // Turn the page.
            String httpRequestUrl = httpRequest.getUrl();
            Map<String, Object> urlParams = getUrlParams(httpRequestUrl);
            if (null == urlParams){
                // Sibling parsers all guard against a null param map; the
                // original would have thrown NPE on urlParams.get below.
                return parsedLinks;
            }
            String pn = (String) urlParams.get("pn");
            int pageNo = Integer.parseInt(pn);
            pageNo += 1;
            String tid = (String)urlParams.get("tid");
            String pid = (String)urlParams.get("pid");
            // "t" is a cache-busting timestamp parameter.
            String nextPageUrl = String.format(commentUrlFormat,tid,pid,pageNo,System.currentTimeMillis());
            CrawlerRequestRecord nextRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(nextPageUrl)
                    .recordKey(nextPageUrl)
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .copyBizTags()
                    .build();
            // Carry the extras (topicId/commentId) forward to the next page.
            nextRecord.getHttpRequest().setExtras(copyExtras(httpRequest.getExtras()));
            parsedLinks.add(nextRecord);
        }

        return parsedLinks;
    }

    /**
     * Parses a topic (thread) page. Marks the page washable, then:
     * on page 1 (comment jobs only) jumps straight to the last page; on pages
     * above 2 walks backwards one page at a time; and when the job collects
     * comments, schedules one comment request per reply post with comments.
     *
     * @param crawlerRequestRecord record for the topic page just downloaded
     * @param httpPage             the downloaded topic page
     * @param parsedLinks          accumulator the new requests are added to
     * @return parsedLinks, with paging and per-post comment requests appended
     */
    private List<CrawlerRequestRecord> parseTopicLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        crawlerRequestRecord.setNeedWashPage(true);
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        String httpRequestUrl = httpRequest.getUrl();
        Html html = httpPage.getHtml();
        Map<String, Object> urlParams = getUrlParams(httpRequestUrl);
        // Topic id is the last path segment, before the query string.
        String topicId = httpRequestUrl.substring(httpRequestUrl.lastIndexOf("/") + 1).split("\\?")[0];
        httpRequest.addExtra("topicId",topicId);
        if (null != urlParams){
            String pn = (String) urlParams.get("pn");
            int pageNo = Integer.parseInt(pn);
            List<Selectable> allPosts = html.xpath("//div[@class=\"p_postlist\"]/div").nodes();
            // Total page count: xpath value, overridden by the last
            // "total_page" occurrence embedded in the raw page when present.
            String totalPageStr = html.xpath("//div[@class=\"pb_footer\"]//ul/li[@class=\"l_reply_num\"]/span[2]/text()").get();
            Matcher mtPage = Pattern.compile("\"total_page\":\\d*").matcher(httpPage.getRawText());
            while (mtPage.find()){
                totalPageStr = mtPage.group(0).split(":")[1];
            }
            if (pageNo == 1){
                // Multi-page thread on a comment job: jump straight to the
                // last page (newest replies) and walk backwards from there.
                if (pageNo < Integer.parseInt(totalPageStr)){
                    pageNo = Integer.parseInt(totalPageStr);
                    CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().scheduleTags().getCategoryTag();
                    if (categoryTag.getLabelTag(comment.enumVal()) != null) {
                        // Comment jobs require filter info to dedupe; bail out without it.
                        if (!crawlerRequestRecord.tagsCreator().bizTags().hasKVTag("comment_record_filter_info")) {
                            logger.error("baidu crawler comment need to filter information!");
                            return parsedLinks;
                        }
                        KVTag filterInfoTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
                        CrawlerRecord filterInfoRecord = JSON.parseObject((String) filterInfoTag.getVal(), CrawlerRecord.class);

                        String nextPageUrl = String.format(topicUrlFormat,topicId,pageNo);
                        CrawlerRequestRecord nextPageRecord = CrawlerRequestRecord.builder()
                                .turnPageRequest(crawlerRequestRecord)
                                .httpUrl(nextPageUrl)
                                .recordKey(nextPageUrl)
                                .releaseTime(crawlerRequestRecord.getReleaseTime())
                                .copyBizTags()
                                .notFilterRecord()
                                .build();
                        nextPageRecord.setFilter(filterInfoRecord.getFilter());
                        nextPageRecord.setFilterInfos(filterInfoRecord.getFilterInfos());
                        nextPageRecord.getHttpRequest().setHeaders(httpRequest.getHeaders());
                        // Fresh random BAIDUID cookie per request.
                        String uuid = UUID.randomUUID().toString().replaceAll("-", "");
                        nextPageRecord.getHttpRequest().addHeader("Cookie","wise_device=0;BAIDUID="+uuid+":FG=1");
//                        nextPageRecord.getHttpRequest().addHeader("Cookie","BAIDUID=8CAB740F07668E923E5D087363ED2BB1:FG=1");
                        parsedLinks.add(nextPageRecord);
                    }
                }
            }
            if (pageNo > 2){
                // Turn the page backwards (towards page 2).
                pageNo -= 1;
                String nextPageUrl = String.format(topicUrlFormat,topicId,pageNo);
                CrawlerRequestRecord nextPageRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(nextPageUrl)
                        .recordKey(nextPageUrl)
                        .releaseTime(crawlerRequestRecord.getReleaseTime())
                        .copyBizTags()
                        .needWashed(false)
                        .needParsed(true)
                        .build();
                nextPageRecord.getHttpRequest().setHeaders(httpRequest.getHeaders());
                nextPageRecord.getHttpRequest().addHeader("Cookie","wise_device=0;");
                parsedLinks.add(nextPageRecord);
            }

            // Decide whether to collect comments for this job.
            CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().scheduleTags().getCategoryTag();
            if (categoryTag.getLabelTag(CrawlerEnum.CrawlerDataType.comment.enumVal()) != null) {
                if (!crawlerRequestRecord.tagsCreator().bizTags().hasKVTag("comment_record_filter_info")) {
                    logger.error("baidu crawler comment need to filter information!");
                    return parsedLinks;
                }
                crawlerRequestRecord.tagsCreator().resultTags().addResultDataType(comment);
                crawlerRequestRecord.tagsCreator().resultTags().addResultDataType(interaction);
                KVTag filterInfoTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
                CrawlerRecord filterInfoRecord = JSON.parseObject((String) filterInfoTag.getVal(), CrawlerRecord.class);
                // Parse each post node individually.
                for (Selectable allPost : allPosts) {
                    // data-field holds a JSON blob describing the post.
                    String info = allPost.xpath("./@data-field").get();
                    try {
                        JSONObject infoObj = JSONObject.parseObject(unescapeHtml2J(info));
                        JSONObject contentObj = infoObj.getJSONObject("content");
                        int postNo = contentObj.getIntValue("post_no");
                        int commentNum = contentObj.getIntValue("comment_num");
                        // Skip the opening post (post_no 1) and posts without comments.
                        if (postNo == 1 || commentNum == 0){
                            continue;
                        }
                        long releaseTime = 0;
                        try {
                            String pubTime = contentObj.getString("date");
                            releaseTime = DateUtils.parseDate(pubTime,"yyyy-MM-dd HH:mm").getTime();
                        } catch (Exception e) {
                            // Unparseable date: skip this post entirely.
                            continue;
                        }

                        String commentId = contentObj.getString("post_id");

                        String commentUrl = String.format(commentUrlFormat, topicId, commentId, 1, System.currentTimeMillis());
                        CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                                .turnPageRequest(crawlerRequestRecord)
                                .httpUrl(commentUrl)
                                .recordKey(commentUrl)
                                .releaseTime(releaseTime)
                                .notFilterRecord()
                                .copyBizTags()
                                .build();

                        commentRecord.setFilter(filterInfoRecord.getFilter());
                        commentRecord.setFilterInfos(filterInfoRecord.getFilterInfos());
                        HttpRequest commentRequest = commentRecord.getHttpRequest();
                        commentRequest.addExtra("topicId",topicId);
                        commentRequest.addExtra("commentId",commentId);
                        parsedLinks.add(commentRecord);
                    } catch (Exception e) {
                        logger.error("{} , info is : {}",e.getMessage(),info);
                    }
                }
            }
        }

        return parsedLinks;
    }

    /**
     * Parses a keyword-search result page: schedules the next result page
     * (up to page 77) and one item request per topic hit.
     *
     * @param crawlerRequestRecord record for the search page just downloaded
     * @param httpPage             the downloaded search-result page
     * @param parsedLinks          accumulator the new requests are added to
     * @return parsedLinks, with next-page and per-topic requests appended
     */
    private List<CrawlerRequestRecord> parseSearchTopicListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        String httpRequestUrl = httpRequest.getUrl();
        Map<String, Object> urlParams = getUrlParams(httpRequestUrl);
        if (null != urlParams){
            // NOTE(review): keyword is reused as extracted from the URL —
            // presumably still percent-encoded; confirm getUrlParams does not decode.
            String keyword = (String)urlParams.get("qw");
            String onlyThread = (String)urlParams.get("only_thread");
            String pageNumStr = (String)urlParams.get("pn");
            int pageNum = Integer.parseInt(pageNumStr);
            // 77 appears to be the site's search-result page cap — TODO confirm.
            if (pageNum < 77){
                pageNum += 1;
                String nextPageUrl = String.format(searchTopicListUrlFormat,keyword,onlyThread,pageNum);
                CrawlerRequestRecord nextPageRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(nextPageUrl)
                        .recordKey(nextPageUrl)
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .build();
                HttpRequest nextPageRequest = nextPageRecord.getHttpRequest();
                nextPageRequest.addHeader("User-Agent",getRandomUA());
                nextPageRequest.addHeader("Host","tieba.baidu.com");
                nextPageRequest.addHeader("Upgrade-Insecure-Requests","1");
                nextPageRequest.addHeader("Connection","keep-alive");
                nextPageRequest.addHeader("Cache-Control","max-age=0");
                nextPageRequest.addHeader("Accept-Language","zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7");
                nextPageRequest.addHeader("Accept-Encoding","gzip, deflate, br");
                nextPageRequest.addHeader("Accept","text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9");
                parsedLinks.add(nextPageRecord);
            }
        }
        Html html = httpPage.getHtml();
        // One node per search hit.
        List<Selectable> itemNodes = html.xpath("//div[@class=\"s_post_list\"]/div[@class=\"s_post\"]").nodes();
        for (Selectable itemNode : itemNodes) {
            String topicId = itemNode.xpath("./span/a/@data-tid").get();
            String topicUrl = String.format(topicUrlFormat,topicId,1);
            String publishTime = itemNode.xpath("./font/text()").get();
            try {
                CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(topicUrl)
                        .recordKey(topicUrl)
                        .releaseTime(DateUtils.parseDate(publishTime,"yyyy-MM-dd HH:mm").getTime())
                        .copyBizTags()
                        .resultLabelTag(article)
                        .resultLabelTag(interaction)
                        .needParsed(true)
                        .needWashed(false)
                        .build();
                HttpRequest itemRequest = itemRecord.getHttpRequest();
                itemRequest.addHeader("User-Agent",getRandomUA());
                itemRequest.addHeader("Host","tieba.baidu.com");
                itemRequest.addHeader("Upgrade-Insecure-Requests","1");
                itemRequest.addHeader("Connection","keep-alive");
                itemRequest.addHeader("Cache-Control","max-age=0");
                itemRequest.addHeader("Accept-Language","zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7");
                itemRequest.addHeader("Accept-Encoding","gzip, deflate, br");
                itemRequest.addHeader("Accept","text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9");
                // Fresh random BAIDUID cookie per request.
                String uuid = UUID.randomUUID().toString().replaceAll("-", "");
                itemRequest.addHeader("Cookie","wise_device=0;BAIDUID="+uuid+":FG=1");
//                itemRequest.addHeader("Cookie","BAIDUID=8CAB740F07668E923E5D087363ED2BB1:FG=1");
                parsedLinks.add(itemRecord);
            }catch (Exception e){
                // Pass the exception as the last argument so SLF4J logs the
                // stack trace (the original dropped it).
                logger.error("parse topic publish time error ,page is {}",httpRequestUrl,e);
            }
        }
        return parsedLinks;
    }

    /**
     * Washes a downloaded Tieba page into structured {@link CrawlerData} items.
     *
     * <p>Output depends on the result tags carried by the record:
     * <ul>
     *   <li>article — the topic's opening post (title, content, images, author);</li>
     *   <li>interaction — reply/comment counters, parented to their source item;</li>
     *   <li>comment — replies on a topic page, or comments on a reply when the
     *       URL matches the comment-list endpoint.</li>
     * </ul>
     *
     * @param crawlerRequestRecord record that produced this page (carries tags and extras)
     * @param httpPage             the downloaded page
     * @return washed data items; empty when the page body is blank
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        String requestUrl = httpRequest.getUrl();
        // topicId was stashed into the request extras by an earlier parsing stage.
        String topicId = (String) extras.get("topicId");
        String rawText = httpPage.getRawText();
        Html html = httpPage.getHtml();
        if (StringUtils.isBlank(rawText)){
            logger.error("httpPage is empty !");
            return crawlerDataList;
        }
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(article)){
            // The first div under p_postlist is the topic's opening post.
            Selectable aNode = html.xpath("//div[@class=\"p_postlist\"]/div[1]");
            String title = html.xpath("//h1[contains(@class,\"core_title_txt\")]/text()|//h3[contains(@class,\"core_title_txt\")]/text()").get();
            // data-pid identifies the post; the content div's id embeds it.
            String dataPid = aNode.xpath("./@data-pid").get();
            List<String> contents = html.xpath("//div[@id=\"post_content_" + dataPid + "\"]//text()").all();
            List<String> images = html.xpath("//div[@id=\"post_content_" + dataPid + "\"]//img/@src").all();
            StringBuffer sbContent = new StringBuffer();
            for (String content : contents) {
                sbContent.append(unescapeHtml2J(content).trim());
            }
            // Image URLs are joined with the literal two-character separator "\x01".
            StringBuffer sbImage = new StringBuffer();
            for (String image : images) {
                sbImage.append(image).append("\\x01");
            }

            // data-field carries an HTML-escaped JSON blob with author/content metadata.
            String info = aNode.xpath("./@data-field").get();
            JSONObject infoObj = JSONObject.parseObject(unescapeHtml2J(info));
            JSONObject authorObj = infoObj.getJSONObject("author");
            JSONObject contentObj = infoObj.getJSONObject("content");
            String author = authorObj.getString("user_nickname");
            String authorId = authorObj.getString("user_id");
            String floor = contentObj.getString("post_no");
            String postClient = contentObj.getString("open_type");
            String comments = html.xpath("//div[@class=\"pb_footer\"]//ul/li[@class=\"l_reply_num\"]/span[1]/text()").get();
            String pubTime = contentObj.getString("date");
            if (StringUtils.isBlank(pubTime)){
                // Fall back to the visible "tail-info" timestamp in the post footer.
                pubTime = aNode.xpath("./div/div/div/div/span[@class=\"tail-info\"][last()]/text()").get();
            }
            long releaseTime = 0;
            try {
                releaseTime = DateUtils.parseDate(pubTime,"yyyy-MM-dd HH:mm").getTime();
            }catch (Exception e){
                // Unparseable timestamp: keep 0 rather than dropping the item.
                logger.error(e.getMessage());
                releaseTime = 0;
            }
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRequestRecord,httpPage)
                    .url(requestUrl)
                    .dataId(StringUtils.joinWith("-",domain(),site,article.enumVal(),topicId))
                    .releaseTime(releaseTime)
                    .addContentKV(Field_Title,title)
                    .addContentKV(Field_Author,author)
                    .addContentKV(Field_Author_Id,authorId)
                    .addContentKV(Field_Content,sbContent.toString())
                    .addContentKV(Field_Images,sbImage.toString())
                    .addContentKV(Field_Floor,floor)
                    .addContentKV(Tag_Field_PostClient,postClient)
                    .resultLabelTag(article)
                    .build();
            crawlerDataList.add(crawlerData);
            if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)){
                // Reply counter for the topic, parented to the article item.
                CrawlerData crawlerInteractionData = CrawlerData.builder()
                        .data(crawlerRequestRecord,httpPage)
                        .url(requestUrl)
                        .dataId(StringUtils.joinWith("-",domain(),site,interaction.enumVal(),topicId))
                        .parentId(StringUtils.joinWith("-",domain(),site,article.enumVal(),topicId))
                        .releaseTime(releaseTime)
                        .addContentKV(Field_I_Comments,comments)
                        .resultLabelTag(interaction)
                        .build();
                crawlerDataList.add(crawlerInteractionData);
            }
        }
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(comment)){
            // Two kinds of comment data: replies to the topic, and comments on a reply.
            if (requestUrl.matches(topicUrlRegex)){
                List<Selectable> allPosts = html.xpath("//div[@class=\"p_postlist\"]/div").nodes();
                // Posts are handled in reverse document order — presumably to favor
                // the newest replies first; confirm against downstream expectations.
                Collections.reverse(allPosts);
                for (Selectable allPost : allPosts) {
                    String info = allPost.xpath("./@data-field").get();
                    try {
                        JSONObject infoObj = JSONObject.parseObject(unescapeHtml2J(info));
                        JSONObject contentObj = infoObj.getJSONObject("content");
                        int postNo = contentObj.getIntValue("post_no");
                        String comments = contentObj.getString("comment_num");
                        if (postNo == 1){
                            // Floor 1 is the opening post, already washed as the article.
                            continue;
                        }
                        JSONObject authorObj = infoObj.getJSONObject("author");
                        String author = authorObj.getString("user_nickname");
                        String authorId = authorObj.getString("user_id");
                        String floor = contentObj.getString("post_no");
                        String postClient = contentObj.getString("open_type");
                        String dataPid = allPost.xpath("./@data-pid").get();
                        if (StringUtils.isBlank(dataPid)){
                            continue;
                        }
                        List<String> contents = html.xpath("//div[@id=\"post_content_" + dataPid + "\"]//text()").all();
                        List<String> images = html.xpath("//div[@id=\"post_content_" + dataPid + "\"]//img/@src").all();
                        StringBuffer sbContent = new StringBuffer();
                        for (String content : contents) {
                            sbContent.append(unescapeHtml2J(content).trim());
                        }
                        StringBuffer sbImage = new StringBuffer();
                        for (String image : images) {
                            sbImage.append(image).append("\\x01");
                        }
                        String pubTime = contentObj.getString("date");
                        if (StringUtils.isBlank(pubTime)){
                            pubTime = allPost.xpath("./div/div/div/div/span[@class=\"tail-info\"][last()]/text()").get();
                        }
                        long releaseTime = 0;
                        try {
                            releaseTime = DateUtils.parseDate(pubTime,"yyyy-MM-dd HH:mm").getTime();
                        }catch (Exception e){
                            logger.error(e.getMessage());
                            releaseTime = 0;
                        }

                        CrawlerData crawlerData = CrawlerData.builder()
                                .data(crawlerRequestRecord,httpPage)
                                .url(requestUrl)
                                .dataId(StringUtils.joinWith("-",domain(),site,comment.enumVal(),dataPid))
                                .parentId(StringUtils.joinWith("-",domain(),site,article.enumVal(),topicId))
                                .releaseTime(releaseTime)
                                .addContentKV(Field_Author,author)
                                .addContentKV(Field_Author_Id,authorId)
                                .addContentKV(Field_Content,sbContent.toString())
                                .addContentKV(Field_Images,sbImage.toString())
                                .addContentKV(Field_Floor,floor)
                                .addContentKV(Tag_Field_PostClient,postClient)
                                .resultLabelTag(comment)
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                                .build();
                        crawlerDataList.add(crawlerData);
                        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)){
                            // Comment counter for this reply, parented to the comment item.
                            CrawlerData crawlerInteractionData = CrawlerData.builder()
                                    .data(crawlerRequestRecord,httpPage)
                                    .url(requestUrl)
                                    .dataId(StringUtils.joinWith("-",domain(),site,interaction.enumVal(),dataPid))
                                    .parentId(StringUtils.joinWith("-",domain(),site,comment.enumVal(),dataPid))
                                    .releaseTime(releaseTime)
                                    .addContentKV(Field_I_Comments,comments)
                                    .resultLabelTag(interaction)
                                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                                    .build();
                            crawlerDataList.add(crawlerInteractionData);
                        }
                    } catch (Exception e) {
                        // One malformed data-field blob must not abort the whole page.
                        logger.error("{} , info is : {}",e.getMessage(),info);
                    }
                }
            }
            if (requestUrl.matches(commentUrlRegex)){
                // Comment-list endpoint: each <li> is one comment on a reply.
                List<Selectable> cmtNodes = html.xpath("//li").nodes();
                for (Selectable cmtNode : cmtNodes) {
                    String info = cmtNode.xpath("./@data-field").get();
                    try {
                        JSONObject infoObj = JSONObject.parseObject(unescapeHtml2J(info));
                        String spid = infoObj.getString("spid");
                        if (StringUtils.isBlank(spid)){
                            // Not a comment node (no sub-post id).
                            continue;
                        }
                        String author = infoObj.getString("user_nickname");
                        List<String> contents = cmtNode.xpath("./div/span[@class=\"lzl_content_main\"]//text()").all();
                        StringBuffer sbContent = new StringBuffer();
                        for (String content : contents) {
                            sbContent.append(unescapeHtml2J(content).trim());
                        }
                        String pubTime = cmtNode.xpath(".//span[@class=\"lzl_time\"]/text()").get();
                        long releaseTime = 0;
                        try {
                            releaseTime = DateUtils.parseDate(pubTime,"yyyy-MM-dd HH:mm").getTime();
                        }catch (Exception e){
                            logger.error(e.getMessage());
                            releaseTime = 0;
                        }
                        // NOTE(review): the parent here is the article, not the reply the
                        // comment belongs to — presumably intentional; verify downstream.
                        CrawlerData crawlerData = CrawlerData.builder()
                                .data(crawlerRequestRecord,httpPage)
                                .url(requestUrl)
                                .dataId(StringUtils.joinWith("-",domain(),site,comment.enumVal(),spid))
                                .parentId(StringUtils.joinWith("-",domain(),site,article.enumVal(),topicId))
                                .releaseTime(releaseTime)
                                .addContentKV(Field_Author,author)
                                .addContentKV(Field_Content,sbContent.toString())
                                .resultLabelTag(comment)
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                                .build();
                        crawlerDataList.add(crawlerData);
                    }catch (Exception e){
                        logger.error(e.getMessage());
                    }
                }
            }
        }
        return crawlerDataList;
    }

    /**
     * Registers every URL pattern this script is able to handle, in the same
     * order as before: index, keyword search, topic-list search, post-bar
     * search, topic page and comment list.
     */
    @Override
    public void initUrlRegulars() {
        String[] patterns = {
                indexRegex,
                keysRegex,
                searchTopicListUrlRegex,
                searchPostBarUrlRegex,
                topicUrlRegex,
                commentUrlRegex
        };
        for (String pattern : patterns) {
            addUrlRegular(pattern);
        }
    }

    /**
     * Decides whether this script should handle the given record by comparing
     * the record's "site" business tag against this script's site.
     *
     * @param crawlerRequestRecord record to check
     * @return true when the record's site tag matches {@code scriptSite}
     *         (case-insensitive); false otherwise, including when the record
     *         carries no "site" tag
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag();
        String crawlerSite = categoryTag.getKVTagStrVal("site");
        // Null-safe comparison: crawlerSite.equalsIgnoreCase(...) would throw
        // NPE for records that have no "site" tag; treat those as "not mine".
        return StringUtils.equalsIgnoreCase(crawlerSite, scriptSite);
    }

    /**
     * Post-execution hook from {@code CrawlerCommonScript}.
     * Intentionally a no-op: this script needs no per-record cleanup.
     */
    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // Intentionally empty.
    }

    /**
     * Identifier of the site family this script crawls.
     *
     * @return the constant {@code "baidu"}
     */
    @Override
    public String domain() {
        return BaiDuTieBaCrawlerScript.domain;
    }


    /**
     * Checks whether the page download succeeded and is complete, and tracks
     * per-URL download attempts in the request extras (a stringified counter
     * keyed by the URL itself), giving up after more than 10 attempts.
     *
     * <p>NOTE(review): the boolean appears to mean "should download again":
     * it returns true on HTTP errors / failed / empty downloads, and false on
     * success or once the retry budget is exhausted — confirm with the caller.
     *
     * @param crawlerRequestRecord last record
     * @param httpPage page
     * @return boolean for page check
     */
    private boolean doHttpPageCheck(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, String> headers = lastRequest.getHeaders();
        // Ensure the wise_device cookie is set for a potential retry
        // (presumably forces the desktop page variant — confirm).
        if (!headers.containsKey("Cookie")){
            lastRequest.addHeader("Cookie","wise_device=0;");
        }
        String lastRequestUrl = lastRequest.getUrl();
        int statusCode = httpPage.getStatusCode();
        Map<String, Object> extras = lastRequest.getExtras();
        int downloadTimes = 0;
        if (null == extras){
            // First sighting with no extras map: addExtra creates it and seeds
            // the counter at "0"; downloadTimes stays 0 for this pass.
            lastRequest.addExtra(lastRequestUrl,"0");
        }else {
            Object dt = extras.get(lastRequestUrl);
            if (null == dt){
                extras.put(lastRequestUrl,"0");
            }else {
                // Counter is stored as a decimal string; bump it by one.
                extras.put(lastRequestUrl,String.valueOf(Integer.parseInt((String) dt) + 1));
            }
            downloadTimes = Integer.parseInt((String) extras.get(lastRequestUrl));
        }

        // Retry budget exhausted — stop asking for re-downloads.
        if (downloadTimes > 10){
            logger.info("{} download to many times : {}",lastRequestUrl, downloadTimes);
            return false;
        }
        if (statusCode != 200){
            logger.error("download page {} error, status code is {}",lastRequestUrl,statusCode);
            return true;
        }
        if (!httpPage.isDownloadSuccess()){
            logger.error("download page failed, check your link {}",lastRequestUrl);
            return true;
        }
        if (StringUtils.isBlank(httpPage.getRawText())){
            logger.error("download page empty, check your link {}",lastRequestUrl);
            return true;
        }
        return false;
    }

    /**
     * Splits the query-string portion of a URL into a key/value map.
     * Pairs without exactly one '=' are skipped; values are kept raw
     * (no URL-decoding).
     *
     * @param url e.g. {@code http://*.*.com?aa=11&bb=22&cc=33}; a bare query
     *            string (no '?') is also accepted
     * @return map of parameter names to values, or null when there is no query
     */
    private Map<String, Object> getUrlParams(String url) {
        String query = url.contains("?") ? url.split("\\?")[1] : url;
        if (StringUtils.isBlank(query)) {
            return null;
        }
        Map<String, Object> result = new HashMap<>();
        for (String pair : query.split("&")) {
            String[] kv = pair.split("=");
            if (kv.length == 2) {
                result.put(kv[0], kv[1]);
            }
        }
        return result;
    }

    /**
     * Returns a shallow copy of the given extras map (values are shared,
     * the map itself is independent of the input).
     *
     * @param inExtras source map; must not be null
     * @return a new {@link HashMap} containing the same entries
     */
    public static Map<String, Object> copyExtras(Map<String,Object> inExtras){
        // HashMap's copy constructor performs exactly the shallow entry-by-entry
        // copy the previous manual loop did.
        return new HashMap<>(inExtras);
    }

    private static List<String> agentList = new ArrayList<>();

    static {
        agentList.add("Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)");
        agentList.add("Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.28.3 (KHTML, like Gecko) Version/3.2.3 ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/525.28.3");
        agentList.add("Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16");
        agentList.add("Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14");
        agentList.add("Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14");
        agentList.add("Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14");
        agentList.add("Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02");
        agentList.add("Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0");
        agentList.add("Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.13 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4043.400");
    }

    /**
     * Picks a User-Agent string uniformly at random from {@link #agentList}.
     *
     * @return one of the pooled UA strings
     */
    private static String getRandomUA(){
        // RandomUtils.nextInt(start, end) treats end as EXCLUSIVE, so the bound
        // must be size() — the previous "size() - 1" could never select the
        // last entry of the pool.
        return agentList.get(RandomUtils.nextInt(0, agentList.size()));
    }

    /**
     * Repeatedly HTML-unescapes a string until no entities remain, handling
     * multiply-encoded payloads such as {@code &amp;quot;} (common in Tieba's
     * data-field attributes). At most 6 passes are performed as a safety cap.
     *
     * @param str possibly HTML-escaped text; may be null
     * @return the fully unescaped text, or null when {@code str} is null
     */
    public static String unescapeHtml2J(String str){
        if (str == null) {
            // Previously this NPE'd on str.contains(); treat null as "nothing to do".
            return null;
        }
        int times = 0;
        // Entities look like "&...;" — both markers must be present to bother.
        while (str.contains("&") && str.contains(";")){
            String unescaped = StringEscapeUtils.unescapeHtml(str);
            if (unescaped.equals(str)) {
                // A pass changed nothing: the remaining "&"/";" are plain text,
                // so further passes would just spin until the cap.
                break;
            }
            str = unescaped;
            times ++;
            if (times > 5){
                // Safety cap against pathological deeply-nested escaping.
                break;
            }
        }
        return str;
    }

    /**
     * Ad-hoc developer check (run manually, not part of CI semantics): feeds a
     * captured, HTML-escaped "data-field" JSON sample through
     * {@link #unescapeHtml2J(String)} and prints the decoded result for
     * visual inspection.
     */
    @Test
    public void test(){
//        String keyword = "耐克";
//        String url = String.format(searchTopicListUrlFormat,keyword,0,1);
//        System.out.println(url);
//        String url1 = String.format(searchTopicListUrlFormat, URLEncoder.encode(keyword),0,1);
//        System.out.println(url1);
////        String url2 = String.format(searchTopicListUrlFormat,keyword,0,1);
////        System.out.println(URLEncoder.encode(url2));
        String s = "{&quot;author&quot;:{&quot;user_id&quot;:493631588,&quot;user_name&quot;:&quot;Junghee\\u00b0&quot;,&quot;props&quot;:null,&quot;portrait&quot;:&quot;tb.1.1f08ab09.T_VEFQqdlCNHOZsAb_eCLQ?t=1438837489&quot;,&quot;user_nickname&quot;:null},&quot;content&quot;:{&quot;post_id&quot;:139743699082,&quot;is_anonym&quot;:false,&quot;forum_id&quot;:241762,&quot;thread_id&quot;:7396056485,&quot;content&quot;:&quot;\\u6458\\u81easportskeeda\\u3002&lt;br&gt;Leon Edwards \\u5c06\\u4e8eUFC263\\u9762\\u5bf9Nate Diaz\\uff0c\\u5728 SBN \\u4e0a\\u63a5\\u53d7 MMA Fighting \\u91c7\\u8bbf\\u65f6\\u900f\\u9732Edwards \\u4fe1\\u5fc3\\u6ee1\\u6ee1\\uff0c\\u653e\\u51fa\\u8c6a\\u8a00\\u5185\\u7279\\u00b7\\u8fea\\u4e9a\\u5179\\u5c06\\u662f\\u5728\\u5bf9\\u9635\\u5361\\u9a6c\\u9c81\\u00b7\\u4e4c\\u65af\\u66fc\\u83b7\\u5f97\\u68a6\\u5bd0\\u4ee5\\u6c42\\u7684\\u51a0\\u519b\\u5934\\u8854\\u4e4b\\u524d\\u8981\\u514b\\u670d\\u7684\\u6700\\u540e\\u4e00\\u9053\\u969c\\u788d\\u3002&lt;br&gt;Leon Edwards\\u8ba4\\u4e3a\\u6253\\u8d25Nate Diaz\\u8fd9\\u6837\\u7684\\u5927\\u724c\\u9009\\u624b\\uff0c\\u5c06\\u4f1a\\u662f\\u4e00\\u4e2a\\u6311\\u6218\\u51a0\\u519b\\u7684\\u7edd\\u4f73\\u673a\\u4f1a\\uff0c\\u6240\\u4ee5\\u8981\\u7528\\u5c3d\\u5168\\u529b\\u6293\\u4f4f\\u4ed6\\u3002&lt;br&gt;&amp;quot;I don&amp;#39;t think there&amp;#39;s bad blood apart from competition blood. He&amp;#39;s the last guy to beat me. He got the decision over me. So I just want to get that one back. I believe I could have done way better in the fight. I&amp;#39;ve shown it over my last eight, nine fights.&amp;quot;&lt;br&gt;&amp;quot;That&amp;#39;s all it is. I want to get my win back. Be a world champion. 
He&amp;#39;s just the guy right now that&amp;#39;s got the belt and I&amp;#39;m going to have to defeat him to become world champion and that&amp;#39;s it,&amp;quot; added Leon Edwards.&lt;br&gt;&lt;img class=\\&quot;BDE_Image\\&quot; pic_type=\\&quot;0\\&quot; width=\\&quot;500\\&quot; height=\\&quot;326\\&quot; src=\\&quot;http:\\/\\/tiebapic.baidu.com\\/forum\\/w%3D580\\/sign=a9126aaa3af5e0feee1889096c6134e5\\/68cb01087bf40ad13bd4a250402c11dfa8ecce85.jpg\\&quot; size=\\&quot;519288\\&quot; &gt;&quot;,&quot;post_no&quot;:1,&quot;type&quot;:&quot;0&quot;,&quot;comment_num&quot;:0,&quot;is_fold&quot;:0,&quot;props&quot;:null,&quot;post_index&quot;:0,&quot;pb_tpoint&quot;:null}}\n";
        String s1 = unescapeHtml2J(s);
        System.out.println(s1);
    }
}
