package com.chance.cc.crawler.development.scripts.hupu;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.core.tags.LabelTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.core.tags.crawler.CrawlerResultTags;
import com.chance.cc.crawler.meta.core.bean.crawler.CrawlerDomainUrls;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.text.ParseException;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.List;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;

/**
 * @Author Zhao.Hhuan
 * @Date Create in 2021/2/25 10:04
 * @Description 虎扑论坛
 **/
public class HupuCrawlerScript extends CrawlerCommonScript {

    private Logger log = LoggerFactory.getLogger(HupuCrawlerScript.class);
    private static final String DOMAIN = "hupu";
    private static final String REQUEST_AGAIN_TAG = DOMAIN + "_request_again";
    private static final String redis = "redis";
    private static final String kafka = "kafka";
    private static final String result_article_url = "article_url";
    private static final String result_releaseTime = "releaseTime";
    private static final String DOMAIN_RESULT_JSON_RECORD_TAG = "domain_result_json";//初始record结果字段

    private static final String MODULE_ENTRACE_URL = "http[s]*://www.hupu.com/";
    private static final String MODULE_URL = "http[s]*://bbs.hupu.com/[0-9a-zA-Z\\d\\-\\/]+";

    private static final String SEARCH_ENTRANCE_URL = "https://my.hupu.com/search";
    private static final String SEARCH_URL = "https://my.hupu.com/search\\S+";
    private static final String SEARCH_PRIFIX = "https://my.hupu.com";

    private static final String ITEM_PREFIX = "https://bbs.hupu.com";
    private static final String ITEM_URL = "https://bbs.hupu.com/\\d+.html";
    private static final String INTERACTION_SOURCE_URL = "https://msa.hupu.com/thread_hit?tid=%s";
    private static final String COMMENT_URL = "https://bbs.hupu.com/\\S*.html#comment";
    private static final String COMMENT_REPLY_SOURCE_URL = "https://bbs.hupu.com/api/v2/reply/reply?tid=%s&pid=%s&maxpid=0";
    private static final String COMMENT_REPLY_URL = "https://bbs.hupu.com/api/v2/reply/reply\\S*";
    private static final String COOKIE = "bbs_2020=1; _dacevid3=3b16b26b.de34.159e.40d3.81c2315f14ac; Hm_lvt_c324100ace03a4c61826ef5494c44048=1614161750; __gads=ID=62f4ca940fdfe3e9:T=1614161751:S=ALNI_MZ4n8zaYcNhoEzUrqJfSCq7zlOxUA; sensorsdata2015jssdkcross=%7B%22distinct_id%22%3A%22177d389a4e9338-0d3d73f2e75944-393e5b09-1327104-177d389a4ea611%22%2C%22%24device_id%22%3A%22177d389a4e9338-0d3d73f2e75944-393e5b09-1327104-177d389a4ea611%22%2C%22props%22%3A%7B%22%24latest_traffic_source_type%22%3A%22%E7%9B%B4%E6%8E%A5%E6%B5%81%E9%87%8F%22%2C%22%24latest_referrer%22%3A%22%22%2C%22%24latest_search_keyword%22%3A%22%E6%9C%AA%E5%8F%96%E5%88%B0%E5%80%BC_%E7%9B%B4%E6%8E%A5%E6%89%93%E5%BC%80%22%7D%7D; smidV2=202102241816284dc4f610677b49f7c9ab822b474add6b0017ce92fcad1e2e0; csrfToken=Yy-r8QKp-zkKaCUY1BlsWSef; Hm_lvt_b241fb65ecc2ccf4e7e3b9601c7a50de=1614161749,1614216985; Hm_lvt_4fac77ceccb0cd4ad5ef1be46d740615=1614161749,1614216985; sensorsdata2015jssdkcross=%7B%22distinct_id%22%3A%22177d389a4e9338-0d3d73f2e75944-393e5b09-1327104-177d389a4ea611%22%2C%22%24device_id%22%3A%22177d389a4e9338-0d3d73f2e75944-393e5b09-1327104-177d389a4ea611%22%2C%22props%22%3A%7B%22%24latest_traffic_source_type%22%3A%22%E7%9B%B4%E6%8E%A5%E6%B5%81%E9%87%8F%22%2C%22%24latest_referrer%22%3A%22%22%2C%22%24latest_search_keyword%22%3A%22%E6%9C%AA%E5%8F%96%E5%88%B0%E5%80%BC_%E7%9B%B4%E6%8E%A5%E6%89%93%E5%BC%80%22%7D%7D; Hm_lpvt_4fac77ceccb0cd4ad5ef1be46d740615=1614220392; Hm_lpvt_b241fb65ecc2ccf4e7e3b9601c7a50de=1614220392; _HUPUSSOID=40270922-a5e2-4705-aee9-c21a5378e059; _CLT=b0c2a05996d8b48b354e1fa4ddfc1fef; u=88663029|6JmO5omRSlIwMDAzMzg5MTM2|7e95|eb699a8b8bb94cc804cc6285ca63635e|8bb94cc804cc6285|aHVwdV8xNWU1MWIwMTQ4MWU1Mzcy; us=a2a15af6b5bcf65f97be74a934682b679606ecdd96637d91c724d643e5ebd1a698f876cf49963fc5db0bb6408047e82056aeefa7f28c6959af9872b133ed4517; ua=53807911; acw_tc=781bad2416142386258241213e7888ee8870a4348a157b09b62789699e2b72";


    /**
     * 脚本domain定义
     *
     * @return
     */
    @Override
    public String domain() {
        return DOMAIN;
    }

    /**
     * 进入脚本的正则列表
     */
    @Override
    public void initUrlRegulars() {
        addUrlRegular(SEARCH_ENTRANCE_URL);
        addUrlRegular(SEARCH_URL);
        addUrlRegular(MODULE_ENTRACE_URL);
        addUrlRegular(MODULE_URL);
        addUrlRegular(ITEM_URL);
        addUrlRegular(COMMENT_URL);
        addUrlRegular(COMMENT_REPLY_URL);
    }

    /**
     * 是否执行脚本 ： 输入数据检查，合格的才进入脚本
     *
     * @param crawlerRequestRecord
     * @return
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        return true;
    }

    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> crawlerRecords = new ArrayList<>();

        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
            String url = supportSourceRecord.getHttpRequest().getUrl();
            if (url.contains("keys")) {
                initKeyword(requestRecord, supportSourceRecord, crawlerRecords);
            }
        }

//        requestRecord.getHttpRequest().addHeader("User-Agent",getRandomUA());
        getSycItemUrlRecord(requestRecord,crawlerRecords);

        return crawlerRecords;
    }

    /**
     * 解析链接方法
     *
     * @param crawlerRequestRecord
     * @param httpPage
     * @return
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<CrawlerRequestRecord>();
        String requestUrl = crawlerRequestRecord.getHttpRequest().getUrl();

        if (!httpPage.isDownloadSuccess() || httpPage.getStatusCode() != 200) {
            log.error("{} status code : [{}]",requestUrl,httpPage.getStatusCode());
            requestAgainCrawlerRecord(parsedLinks, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }

        if (requestUrl.matches(SEARCH_URL)) {
            searchUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (requestUrl.matches(MODULE_URL)) {
            moduleUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (requestUrl.matches(ITEM_URL)) {
            itemUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (requestUrl.matches(COMMENT_URL)) {
            commentUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if(requestUrl.matches(COMMENT_REPLY_URL)){
            commentReplyUrlRecord(crawlerRequestRecord,httpPage,parsedLinks);
        }

        return parsedLinks;
    }

    private void searchUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String keyword = (String) httpPage.getRequest().getExtras().get("keyword");
        String requestUrl = httpPage.getRequest().getUrl();
        if (!requestUrl.contains("page")) {
            List<String> all = httpPage.getHtml().xpath("//div[@class=\"med card-section\"]/p[1]//text()").all();
            if (all != null && all.size() > 0) {
                StringBuffer sbf = new StringBuffer();
                for (String s : all) {
                    sbf.append(s);
                }
                log.error(sbf.toString());
                return;
            }
        }

        //翻页
        String cookie = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("Cookie");
        if (StringUtils.isBlank(cookie)) {
            log.error("cookie can not null !");
            return;
        }

        String nextUrl = httpPage.getHtml().xpath("//a[@class=\"next\"]/@href").get();
        if (StringUtils.isNotBlank(nextUrl)) {
            nextUrl = SEARCH_PRIFIX + StringEscapeUtils.unescapeHtml(nextUrl);
            CrawlerRequestRecord turnRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(nextUrl)
                    .releaseTime(System.currentTimeMillis())
                    .httpHead("Referer", requestUrl)
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            turnRecord.getHttpRequest().setExtras(crawlerRequestRecord.getHttpRequest().getExtras());
            parsedLinks.add(turnRecord);
        }

        crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKvTags().remove("Cookie");
        //详情页
        List<Selectable> nodes = httpPage.getHtml().xpath("//form[@name=\"form_sub\"]/table/tbody[last()]/tr").nodes();
        for (Selectable node : nodes) {
            String itemUrl = node.xpath("./td[@class=\"p_title\"]/a/@href").get();
            if (StringUtils.isBlank(itemUrl)) {
                continue;
            }

            String releaseTime = node.xpath("./td[last()-2]/text()").get();
            if (StringUtils.isBlank(releaseTime)) {
                continue;
            }

            try {
                long releaseTimeToLong = DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm").getTime();
                CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(itemUrl)
                        .recordKey(itemUrl + keyword)
                        .releaseTime(releaseTimeToLong)
                        .httpHead("Cookie", cookie)
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                parsedLinks.add(itemRecord);
            } catch (ParseException e) {
                log.error(e.getMessage());
            }
        }
    }

    private void moduleUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String requestUrl = httpPage.getRequest().getUrl();
        //翻页
        String nextUrl = httpPage.getHtml().xpath("//li[contains(@class,'hupu-rc-pagination-item-active')]/following-sibling::li[1]/a/text()").get();
        if (StringUtils.isNotBlank(nextUrl)) {
            nextUrl = requestUrl.substring(0, requestUrl.lastIndexOf("-") + 1) + nextUrl;
            CrawlerRequestRecord turnRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(nextUrl)
                    .releaseTime(System.currentTimeMillis())
                    .httpHeads(crawlerRequestRecord.getHttpRequest().getHeaders())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            parsedLinks.add(turnRecord);
        }

        List<Selectable> nodes = httpPage.getHtml().xpath("//div[@class=\"bbs-sl-web-post\"]/ul/li").nodes();
        for (Selectable node : nodes) {
            String itemUrl = node.xpath(".//a[@class=\"p-title\"]/@href").get();
            if (StringUtils.isBlank(itemUrl)) {
                continue;
            } else {
                itemUrl = ITEM_PREFIX + itemUrl;
            }

            String releaseTime = node.xpath(".//div[@class=\"post-time\"]/text()").get();
            if (StringUtils.isBlank(releaseTime)) {
                continue;
            }

            try {
                long releaseTimeToLong = washTime(releaseTime);
                CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(itemUrl)
                        .releaseTime(releaseTimeToLong)
                        .httpHeads(crawlerRequestRecord.getHttpRequest().getHeaders())
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                parsedLinks.add(itemRecord);
            } catch (ParseException e) {
                log.error(e.getMessage());
            }
        }
    }

    private void itemUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String json = httpPage.getHtml().xpath("//input[@id=\"hp-htm-text-2020\"]/@value").get();
        if (StringUtils.isNotBlank(json)) {
            Json json1 = new Json(StringEscapeUtils.unescapeHtml(json));
            httpPage.setJson(json1);
        }
        try {
            httpPage.getJson().jsonPath($_type + ".detail.thread").get();
        } catch (Exception e) {
            log.error(DOMAIN + "item [{}] download is error!will retry!", httpPage.getRequest().getUrl());
            crawlerRequestRecord.getHttpRequest().getHeaders().put("Cookie", COOKIE);
            requestAgainCrawlerRecord(parsedLinks, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return;
        }

        String requestUrl = httpPage.getRequest().getUrl();
        CrawlerResultTags resultTags = crawlerRequestRecord.tagsCreator().resultTags();
        if (resultTags.hasDataType(interaction)) {

            String id = requestUrl.substring(requestUrl.lastIndexOf("/") + 1, requestUrl.lastIndexOf("."));
            String url = String.format(INTERACTION_SOURCE_URL, id);
            CrawlerRequestRecord interaction = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(url)
                    .releaseTime(System.currentTimeMillis())
                    .notFilterRecord()
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                    .build();
            parsedLinks.add(interaction);
        }

        if (resultTags.hasDataType(comment)) {
            resultTags.getCategoryTag().removeLabelTag("comment");

            CrawlerRequestRecord filterRecord = resultTags.getCategoryTag().getKVTagObjVal("comment_filter_record", CrawlerRequestRecord.class);
            if (filterRecord == null) {
                log.error("filter record can not null !");
                return;
            }
            //查找最后一页的评论
            String lastUrl = httpPage.getHtml().xpath("//li[@class=\"hupu-rc-pagination-next\"]/preceding-sibling::li[1]/a/@href").get();
            if (StringUtils.isNotBlank(lastUrl)) {
                lastUrl = requestUrl.substring(0, requestUrl.lastIndexOf("/") + 1) + lastUrl + "#comment";
            } else {
                lastUrl = requestUrl + "#comment";
            }
            CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(lastUrl)
                    .releaseTime(System.currentTimeMillis())
                    .httpHeads(crawlerRequestRecord.getHttpRequest().getHeaders())
                    .notFilterRecord()
                    .copyBizTags()
                    .needWashed(true)
                    .resultLabelTag(comment)
                    .build();
            commentRecord.setFilter(filterRecord.getFilter());
            commentRecord.setFilterInfos(filterRecord.getFilterInfos());
            commentRecord.getHttpRequest().addExtra("articleUrl",requestUrl);
            parsedLinks.add(commentRecord);
        }
    }

    private void commentUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String json = httpPage.getHtml().xpath("//input[@id=\"hp-htm-text-2020\"]/@value").get();
        if (StringUtils.isNotBlank(json)) {
            Json json1 = new Json(StringEscapeUtils.unescapeHtml(json));
            httpPage.setJson(json1);
        }

        try {
            httpPage.getJson().jsonPath($_type + ".detail.replies.list").all();
        } catch (Exception e) {
            requestAgainCrawlerRecord(parsedLinks, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return;
        }

        String requestUrl = httpPage.getRequest().getUrl();
        //翻页
        String nextUrl = httpPage.getHtml().xpath("//li[@class=\"hupu-rc-pagination-prev\"]/a/@href").get();
        if (StringUtils.isNotBlank(nextUrl)) {
            nextUrl = requestUrl.substring(0, requestUrl.lastIndexOf("/") + 1) + nextUrl + "#comment";
            CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(nextUrl)
                    .releaseTime(System.currentTimeMillis())
                    .httpHeads(crawlerRequestRecord.getHttpRequest().getHeaders())
                    .copyBizTags()
                    .needWashed(true)
                    .copyResultTags()
                    .build();
            commentRecord.getHttpRequest().setExtras(crawlerRequestRecord.getHttpRequest().getExtras());
            parsedLinks.add(commentRecord);
        }

        //获取评论的评论
        List<String> allReplies = httpPage.getJson().jsonPath($_type + ".detail.replies.list").all();
        String tid = requestUrl.contains("-") ? requestUrl.substring(requestUrl.lastIndexOf("/") + 1, requestUrl.lastIndexOf("-"))
                : requestUrl.substring(requestUrl.lastIndexOf("/") + 1, requestUrl.lastIndexOf("."));
        for (String allReply : allReplies) {
            JSONObject jsonObject = JSONObject.parseObject(allReply);
            int replyNum = jsonObject.getIntValue("replyNum");
            if (replyNum < 1) {
                continue;
            }
            String pid = jsonObject.getString("pid");
            if (StringUtils.isBlank(pid) || StringUtils.isBlank(tid)) {
                log.error("pid or tid can not null !");
                continue;
            }
            String url = String.format(COMMENT_REPLY_SOURCE_URL, tid, pid);
            CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRequestRecord)
                    .httpUrl(url)
                    .releaseTime(System.currentTimeMillis())
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            commentRecord.getHttpRequest().setExtras(crawlerRequestRecord.getHttpRequest().getExtras());
            parsedLinks.add(commentRecord);
        }
    }

    private void commentReplyUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        try {
            httpPage.getJson().jsonPath($_type + ".data.list").all();
        } catch (Exception e) {
            requestAgainCrawlerRecord(parsedLinks, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return;
        }
    }

    @Override
    public void afterInternalDownload(CrawlerRequestRecord crawlerRecord, List<CrawlerRequestRecord> internalDownloadRecords, List<CrawlerRequestRecord> links) {
        for (CrawlerRequestRecord internalDownloadRecord : internalDownloadRecords) {
            HttpPage internalDownloadPage = internalDownloadRecord.getInternalDownloadPage();
            String views = internalDownloadPage.getHtml().xpath("//body").get();
            views = StringUtils.isBlank(views) ? "0" : views;
            crawlerRecord.getHttpRequest().addExtra("views", views);
        }
    }

    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        CrawlerResultTags crawlerResultTags = crawlerRecord.tagsCreator().resultTags();
        String requestUrl = page.getRequest().getUrl();

        if (crawlerResultTags.hasDataType(article)) {
            crawlerDataList.addAll(washArticle(crawlerRecord, page));
        }

        if (crawlerResultTags.hasDataType(comment)) {
            crawlerDataList.addAll(washComment(crawlerRecord, page));
        }

        if (crawlerResultTags.hasDataType(interaction)) {
            crawlerDataList.add(washInteraction(crawlerRecord, page));
        }

        return crawlerDataList;
    }

    private List<CrawlerData> washArticle(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        String itemUrl = httpPage.getRequest().getUrl();
        String articleKey = itemUrl.substring(itemUrl.lastIndexOf("/") + 1, itemUrl.lastIndexOf("."));

        JSONObject jsonObject = JSONObject.parseObject(httpPage.getJson().jsonPath($_type + ".detail.thread").get());
        String title = jsonObject.getString("title");
        JSONObject user = jsonObject.getJSONObject("author");
        String author = "";
        String authorId = "";
        if (user != null) {
            author = user.getString("puname");
            authorId = user.getString("puid");
        }
        String content = jsonObject.getString("content");
        StringBuffer contentBf = new StringBuffer();
        List<String> images = new ArrayList<>();
        if (StringUtils.isNotBlank(content)) {
            Html html = new Html(content);
            List<String> all = html.xpath("//text()").all();
            for (String data : all) {
                if (StringUtils.isBlank(data)) {
                    continue;
                }
                contentBf.append(data).append(" ");
            }

            images = html.xpath("//img/@src").all();
        }

        Long releaseTimeToLong = jsonObject.getLong("createdAt");
        CrawlerData crawlerData = CrawlerData.builder()
                .data(crawlerRequestRecord, httpPage)
                .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, CrawlerEnum.CrawlerDataType.article.enumVal(), articleKey))
                .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("article"))
                .url(itemUrl)
                .releaseTime(releaseTimeToLong)
                .addContentKV(Field_Content, contentBf.toString())
                .addContentKV(Field_Title, title)
                .addContentKV(Field_Author, author)
                .addContentKV(Field_Author_Id, authorId)
                .addContentKV(Field_Images, images.toString())
                .flowInPipelineTag(kafka)
                .build();
        crawlerData.setFilterPipelineResult(true);
        crawlerDataList.add(crawlerData);

        CrawlerData crawlerDataUrl = CrawlerData.builder()
                .data(crawlerRequestRecord, httpPage)
                .dataId(site+articleKey)
                .url(itemUrl)
                .releaseTime(releaseTimeToLong)
                .addContentKV(result_article_url,itemUrl)
                .addContentKV(result_releaseTime, String.valueOf(releaseTimeToLong))
                .addContentKV(Tag_Site_Info, crawlerData.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(Tag_Site_Info))
                .flowInPipelineTag(redis)
                .build();
        crawlerDataUrl.setFilterPipelineResult(true);
        crawlerDataList.add(crawlerDataUrl);

        return crawlerDataList;
    }

    private CrawlerData washInteraction(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        String itemUrl = httpPage.getRequest().getUrl();
        String articleKey = itemUrl.substring(itemUrl.lastIndexOf("/") + 1, itemUrl.lastIndexOf("."));

        JSONObject jsonObject = JSONObject.parseObject(httpPage.getJson().jsonPath($_type + ".detail.thread").get());
        String views = (String) crawlerRequestRecord.getHttpRequest().getExtras().get("views");
        String comments = httpPage.getHtml().xpath("//span[@class=\"reply\"]/text()[1]").get();
        String likes = httpPage.getHtml().xpath("//span[@class=\"detail-tip\"]/text()").get();
        likes = StringUtils.isBlank(likes) ? "0" : likes;

        Long releaseTimeToLong = jsonObject.getLong("createdAt");
        CrawlerData crawlerData = CrawlerData.builder()
                .data(crawlerRequestRecord, httpPage)
                .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, CrawlerEnum.CrawlerDataType.article.enumVal(), articleKey))
                .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, interaction.enumVal(), articleKey))
                .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("interaction"))
                .releaseTime(releaseTimeToLong)
                .url(itemUrl)
                .addContentKV(Field_I_Comments, comments)
                .addContentKV(Field_I_Views, views)
                .addContentKV(Field_I_Likes, likes)
                .flowInPipelineTag(kafka)
                .build();
        crawlerData.setFilterPipelineResult(true);

        return crawlerData;
    }

    public List<CrawlerData> washComment(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        String requestUrl = httpPage.getRequest().getUrl();
        String articleUrl = (String) httpPage.getRequest().getExtras().get("articleUrl");
        String articleKey = articleUrl.substring(articleUrl.lastIndexOf("/") + 1, articleUrl.lastIndexOf("."));

        List<String> allReplies = requestUrl.matches(COMMENT_REPLY_URL) ? httpPage.getJson().jsonPath($_type + ".data.list").all():  httpPage.getJson().jsonPath($_type + ".detail.replies.list").all();
        List<String> floors = httpPage.getHtml().xpath("//div[@class=\"reply-list-content\"]//span[text()='举报']/following-sibling::a[1]/text()[1]").all();
        for (int i = allReplies.size() - 1; i >= 0; i--) {
            JSONObject jsonObject = JSONObject.parseObject(allReplies.get(i));
            String author = jsonObject.getJSONObject("author").getString("puname");
            String authorId = jsonObject.getString("authorId");
            String content = jsonObject.getString("content");
            List<String> images = new ArrayList<>();
            StringBuffer contentBf = new StringBuffer();
            if (StringUtils.isNotBlank(content)) {
                List<String> all = new Html(content).xpath("//text()").all();
                for (String data : all) {
                    if (StringUtils.isBlank(data)) {
                        continue;
                    }
                    contentBf.append(data).append(" ");
                }
                images = new Html(content).xpath("//img/@src").all();
            }
            String commentId = jsonObject.getString("pid");
            long releaseTimeToLong = jsonObject.getLong("createdAt");
            if(requestUrl.matches(COMMENT_REPLY_URL)){
                CrawlerData crawlerData = CrawlerData.builder()
                        .data(crawlerRequestRecord, httpPage)
                        .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(),site, article.enumVal(), articleKey))
                        .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(),site, comment.enumVal(),comment.enumVal(), commentId))
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("comment"))
                        .releaseTime(releaseTimeToLong)
                        .url(requestUrl)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                        .addContentKV(Field_Author, author)
                        .addContentKV(Field_Author_Id, authorId)
                        .addContentKV(Field_Content, contentBf.toString())
                        .addContentKV(Field_Images, images.toString())
                        .flowInPipelineTag(kafka)
                        .build();
                crawlerData.setFilterPipelineResult(true);
                crawlerDataList.add(crawlerData);
            }else{
                CrawlerData crawlerData = CrawlerData.builder()
                        .data(crawlerRequestRecord, httpPage)
                        .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, CrawlerEnum.CrawlerDataType.article.enumVal(), articleKey))
                        .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), commentId))
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("comment"))
                        .releaseTime(releaseTimeToLong)
                        .url(requestUrl)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                        .addContentKV(Field_Author, author)
                        .addContentKV(Field_Author_Id, authorId)
                        .addContentKV(Field_Content, contentBf.toString())
                        .addContentKV(Field_Floor,floors.get(i))
                        .addContentKV(Field_Images, images.toString())
                        .flowInPipelineTag(kafka)
                        .build();
                crawlerData.setFilterPipelineResult(true);
                crawlerDataList.add(crawlerData);

                String comments = jsonObject.getString("replyNum");
                String likes = jsonObject.getString("count");
                CrawlerData crawlerDataInteraction = CrawlerData.builder()
                        .data(crawlerRequestRecord, httpPage)
                        .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), commentId))
                        .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment, interaction.enumVal(), commentId))
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("interaction"))
                        .releaseTime(releaseTimeToLong)
                        .url(requestUrl)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                        .addContentKV(Field_I_Comments, comments)
                        .addContentKV(Field_Floor,floors.get(i))
                        .addContentKV(Field_I_Likes,likes)
                        .flowInPipelineTag(kafka)
                        .build();
                crawlerDataInteraction.setFilterPipelineResult(true);
                crawlerDataList.add(crawlerDataInteraction);
            }
        }
        return crawlerDataList;
    }

    private static long washTime(String time) throws ParseException {
        long timeToLong = 0;
        if (StringUtils.isBlank(time)) {
            return timeToLong;
        }

        LocalDate now = LocalDate.now();
        if (time.matches("\\d{2}-\\d{2} \\d{2}:\\d{2}")) {
            time = now.getYear() + time;
        }

        timeToLong = DateUtils.parseDate(time, "yyyyMM-dd HH:mm", "yyyy-MM-dd HH:mm").getTime();
        return timeToLong;
    }

    @Override
    public void afterExecute(CrawlerRecordContext crawlerRecordContext) {

    }

    private void requestAgainCrawlerRecord(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        int count = 0;
        if (crawlerBusinessTags.hasKVTag(REQUEST_AGAIN_TAG)) {
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag(REQUEST_AGAIN_TAG).getVal();
            if (count >= 5) {
                log.error(DOMAIN + " download page the number of retries exceeds the limit" +
                        ",request url {}", crawlerRecord.getHttpRequest().getUrl());
                return;
            }
        }

        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        LabelTag type = crawlerRecord.tagsCreator().requestTags().getCategoryTag().getLabelTags().get("turn_page_item_request");
        CrawlerRequestRecord crawlerRequestRecord = null;
        if (type == null) {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        } else {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        }

        if (crawlerRequestRecord == null) {
            return;
        }

        crawlerRequestRecord.getHttpRequest().setExtras(crawlerRecord.getHttpRequest().getExtras());
        crawlerRequestRecord.getHttpRequest().setHeaders(crawlerRecord.getHttpRequest().getHeaders());
        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(REQUEST_AGAIN_TAG, ++count);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    private void initKeyword(CrawlerRequestRecord requestRecord, CrawlerRequestRecord supportSourceRecord, List<CrawlerRecord> crawlerRecords) {
        HttpPage internalDownloadPage = supportSourceRecord.getInternalDownloadPage();
        String requestUrl = requestRecord.getHttpRequest().getUrl();
        String sourceUrl = (String) requestRecord.getHttpRequest().getExtras().get("searchKwSourceUrl");
        if (StringUtils.isEmpty(sourceUrl)) {
            log.error("sourceUrl can not null !");
            return;
        }
        Json json = internalDownloadPage.getJson();
        String msg = json.jsonPath($_type + ".msg").get();
        if (!"success".equals(msg)) {
            log.error("keyword page [{}] download error!", internalDownloadPage.getRequest().getUrl());
            return;
        }

        List<String> all = json.jsonPath($_type + ".content").all();
        for (String data : all) {
            JSONObject jsonObject = JSONObject.parseObject(data);
            String keyword = jsonObject.getString("keyword");

            if (requestUrl.matches(SEARCH_ENTRANCE_URL)) {
                String itemUrl = null;
                try {
                    itemUrl = String.format(sourceUrl, URLEncoder.encode(keyword, "UTF-8"));
                } catch (UnsupportedEncodingException e) {
                    log.error(e.getMessage());
                }
                CrawlerRequestRecord turnRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(requestRecord)
                        .httpUrl(itemUrl)
                        .releaseTime(System.currentTimeMillis())
                        .httpHead("Referer", requestRecord.getHttpRequest().getUrl())
                        .notFilterRecord()
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                turnRecord.tagsCreator().bizTags().addKeywords(keyword);
                turnRecord.getHttpRequest().addExtra("keyword", keyword);
                crawlerRecords.add(turnRecord);
            } else if (requestUrl.matches(MODULE_ENTRACE_URL)) {
                String itemUrl = String.format(sourceUrl, keyword);
                CrawlerRequestRecord turnRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(requestRecord)
                        .httpUrl(itemUrl)
                        .releaseTime(System.currentTimeMillis())
                        .notFilterRecord()
                        .httpHeads(requestRecord.getHttpRequest().getHeaders())
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                turnRecord.tagsCreator().bizTags().addCustomKV(Tag_Site_Info, keyword);
                crawlerRecords.add(turnRecord);
            }
        }
    }

    private static List<String> agentList = new ArrayList<>();

    static {
        agentList.add("Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)");
        agentList.add("Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.28.3 (KHTML, like Gecko) Version/3.2.3 ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/525.28.3");
        agentList.add("Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16");
        agentList.add("Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14");
        agentList.add("Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14");
        agentList.add("Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14");
        agentList.add("Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02");
        agentList.add("Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0");
        agentList.add("Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.13 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4043.400");
    }

    private void getSycItemUrlRecord(CrawlerRequestRecord requestRecord,List<CrawlerRecord> crawlerRecords){
        if (requestRecord.tagsCreator().bizTags().hasKVTag(DOMAIN_RESULT_JSON_RECORD_TAG)) {
            KVTag domainResultJson = requestRecord.tagsCreator().bizTags().getCategoryTag().getKVTag(DOMAIN_RESULT_JSON_RECORD_TAG);
            CrawlerDomainUrls crawlerDomainUrls = JSON.parseObject(String.valueOf(domainResultJson.getVal()), CrawlerDomainUrls.class);
            String url = crawlerDomainUrls.getUrl();
            Json urlJson = new Json(url);
            String itemUrl = urlJson.jsonPath($_type + "."+result_article_url).get();
            long releaseTimeToLong = Long.parseLong(urlJson.jsonPath($_type + "." + result_releaseTime).get());
            String siteInfo = urlJson.jsonPath($_type + "." + Tag_Site_Info).get();
            if(!isDateRange(requestRecord,releaseTimeToLong)){
                return ;
            }
            requestRecord.tagsCreator().bizTags().getCategoryTag().getKvTags().remove(DOMAIN_RESULT_JSON_RECORD_TAG);

            CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(requestRecord)
                    .httpUrl(itemUrl)
                    .releaseTime(releaseTimeToLong)
                    .httpHeads(requestRecord.getHttpRequest().getHeaders())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            itemRecord.tagsCreator().bizTags().addCustomKV(Tag_Site_Info,siteInfo);
            crawlerRecords.add(itemRecord);
        }
    }
    private boolean isDateRange(CrawlerRequestRecord crawlerRequestRecord, Long releaseTimeToLong) {
        boolean isRange = false;
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        if (filter == CrawlerEnum.CrawlerRecordFilter.keyOrDateRange || filter == CrawlerEnum.CrawlerRecordFilter.dateRange) {
            List<FilterInfo> filterInfos = crawlerRequestRecord.getFilterInfos();
            Long startTime = null;
            Long endTime = null;
            for (FilterInfo filterInfo : filterInfos) {
                if (filterInfo.getFilter() == CrawlerEnum.CrawlerRecordFilter.dateRange) {
                    long[] dateAllowRange = filterInfo.getDateAllowRange();
                    int hourFromNow = filterInfo.getHourFromNow();
                    if (dateAllowRange != null) {
                        startTime = dateAllowRange[0];
                        endTime = dateAllowRange[1];
                    } else if (hourFromNow != 0) {
                        endTime = System.currentTimeMillis();
                        startTime = endTime - DateUtils.MILLIS_PER_HOUR * hourFromNow;
                    }
                }
            }
            if (startTime != null && releaseTimeToLong != 0 && startTime <= releaseTimeToLong && releaseTimeToLong <= endTime) {
                isRange = true;
            }
        } else {
            isRange = true;
        }
        return isRange;
    }

    private static String getRandomUA(){
        return agentList.get(RandomUtils.nextInt(0,agentList.size() - 1));
    }

    public static void main(String[] args) {
        String s = "{&quot;detail&quot;:{&quot;breadCrumb&quot;:[{&quot;title&quot;:&quot;社区&quot;,&quot;url&quot;:&quot;/&quot;},{&quot;title&quot;:&quot;步行街&quot;,&quot;url&quot;:&quot;/all-gambia&quot;},{&quot;title&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;},{&quot;title&quot;:&quot;开学前减肥20天-D...&quot;,&quot;url&quot;:&quot;/41317167.html&quot;}],&quot;user&quot;:{&quot;puid&quot;:&quot;88663029&quot;,&quot;euid&quot;:&quot;261708377898037&quot;,&quot;puname&quot;:&quot;虎扑JR0003389136&quot;,&quot;level&quot;:0,&quot;header&quot;:&quot;https://w1.hoopchina.com.cn/games/images/def_man.png&quot;,&quot;url&quot;:&quot;https://my.hupu.com/261708377898037&quot;,&quot;groupIds&quot;:[-1],&quot;isBlacked&quot;:false,&quot;isAdmin&quot;:false,&quot;ban&quot;:{&quot;bannedGlobally&quot;:false,&quot;bannedAtForum&quot;:false,&quot;autoOpenedGlobally&quot;:false,&quot;autoOpenedAtForum&quot;:false,&quot;banned&quot;:false,&quot;needExam&quot;:false}},&quot;thread&quot;:{&quot;tid&quot;:&quot;41317167&quot;,&quot;title&quot;:&quot;开学前减肥20天-Day7&quot;,&quot;titleBeforeMerge&quot;:&quot;&quot;,&quot;content&quot;:&quot;&lt;p&gt;痿掉的一天。没有运动，饮食随便吃了点也没记录。&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;早上家里装宽带，乡下事情就是多，这里牵根线，那边人不同意，搞东搞西还没搞好，无语。&lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;&lt;img src=\\&quot;https://i2.hoopchina.com.cn/hupuapp/bbs/76502948451920/thread_76502948451920_20210223205507_s_2084423_o_w_1702_h_1276_10123.jpg?x-oss-process=image/resize,w_800\\&quot;/&gt;&lt;/p&gt;&quot;,&quot;videoCover&quot;:&quot;&quot;,&quot;video&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41317167.html&quot;,&quot;lights&quot;:0,&quot;replies&quot;:0,&quot;recommend&quot;:0,&quot;read&quot;:1600,&quot;client&quot;:&quot;IPHONE&quot;,&quot;createdAt&quot;:1614084910000,&quot;createdAtFormat&quot;:&quot;1天前&quot;,&quot;repliedAt&quot;:1614084910000,&quot;hasVideo&quot;:false,&quot;authorId&quot;:&quot;33315633&quot;,&quot;author&quot;:{&quot;puid&quot;:&quot;33315633&quot;,&quot;euid&quot;:&quot;76502948451920&quot;,&quot;puname&quot;:&quot;乐在其中233&quot;,&quot;level&quot;:22,&quot;header&quot;:&quot;https://i3.hoopchina.com.cn/user/920/76502948451920-4744540168048678271.png@150h_150w_2e&quot;,&quot;url&quot;:&quot;https://my.hupu.com/76502948451920&quot;,&quot;groupIds&quot;:[-1],&quot;isBlacked&quot;:false,&quot;isAdmin&quot;:false},&quot;topicId&quot;:&quot;23&quot;,&quot;fid&quot;:&quot;203&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;cateId&quot;:&quot;1&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;logo&quot;:&quot;https://i3.hoopchina.com.cn/hupupc/bbs/17/223289078965017/thread_223289078965017_20191225174750_1738359650.png&quot;,&quot;url&quot;:&quot;/fit&quot;,&quot;desc&quot;:&quot;这里是跑步与健身爱好者的乐园，更是关心你身体健康的家园！&quot;,&quot;count&quot;:39897,&quot;countText&quot;:&quot;4.0w&quot;,&quot;fid&quot;:&quot;203&quot;},&quot;status&quot;:0,&quot;isLock&quot;:0,&quot;contentType&quot;:1},&quot;lights&quot;:[],&quot;replies&quot;:{&quot;count&quot;:20,&quot;size&quot;:20,&quot;current&quot;:1,&quot;total&quot;:1,&quot;baseUrl&quot;:&quot;/41317167.html&quot;,&quot;list&quot;:[]},&quot;latest&quot;:[{&quot;tid&quot;:&quot;41356207&quot;,&quot;title&quot;:&quot;不懂就问，深蹲卧推硬拉算基础力量么？谁公认的？&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41356207.html&quot;,&quot;lights&quot;:0,&quot;replies&quot;:3,&quot;read&quot;:158,&quot;createdAt&quot;:1614237733000,&quot;createdAtFormat&quot;:&quot;41分钟前&quot;,&quot;repliedAt&quot;:1614239652000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;35970035&quot;,&quot;puname&quot;:&quot;语虚小辫&quot;,&quot;url&quot;:&quot;https://my.hupu.com/169559422595174&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}},{&quot;tid&quot;:&quot;41356049&quot;,&quot;title&quot;:&quot;黄景瑜身材没得挑，网友喊话《检察风云》电影赶紧上映&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41356049.html&quot;,&quot;lights&quot;:0,&quot;replies&quot;:0,&quot;read&quot;:180,&quot;createdAt&quot;:1614237317000,&quot;createdAtFormat&quot;:&quot;48分钟前&quot;,&quot;repliedAt&quot;:1614237317000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;81322534&quot;,&quot;puname&quot;:&quot;胡大宝SOD秘&quot;,&quot;url&quot;:&quot;https://my.hupu.com/99149485565549&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}},{&quot;tid&quot;:&quot;41355595&quot;,&quot;title&quot;:&quot;适马，不是那个健身的适马了？&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41355595.html&quot;,&quot;lights&quot;:0,&quot;replies&quot;:0,&quot;read&quot;:206,&quot;createdAt&quot;:1614236106000,&quot;createdAtFormat&quot;:&quot;1小时前&quot;,&quot;repliedAt&quot;:1614236106000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;66223134&quot;,&quot;puname&quot;:&quot;工机哥与闲鱿鱼&quot;,&quot;url&quot;:&quot;https://my.hupu.com/241200922124378&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}},{&quot;tid&quot;:&quot;41354577&quot;,&quot;title&quot;:&quot;这个腹是什么水平？&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41354577.html&quot;,&quot;lights&quot;:0,&quot;replies&quot;:2,&quot;read&quot;:420,&quot;createdAt&quot;:1614233298000,&quot;createdAtFormat&quot;:&quot;1小时前&quot;,&quot;repliedAt&quot;:1614236414000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;76913892&quot;,&quot;puname&quot;:&quot;走走猪&quot;,&quot;url&quot;:&quot;https://my.hupu.com/196300145298979&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}},{&quot;tid&quot;:&quot;41354453&quot;,&quot;title&quot;:&quot;Hxdm，帮忙看下蛋白粉这个价格怎么样&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41354453.html&quot;,&quot;lights&quot;:0,&quot;replies&quot;:1,&quot;read&quot;:457,&quot;createdAt&quot;:1614232949000,&quot;createdAtFormat&quot;:&quot;2小时前&quot;,&quot;repliedAt&quot;:1614237720000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;16261004&quot;,&quot;puname&quot;:&quot;被老婆骂后跪着说&quot;,&quot;url&quot;:&quot;https://my.hupu.com/35249022327551&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}},{&quot;tid&quot;:&quot;41353799&quot;,&quot;title&quot;:&quot;最美好的永远是夏天。&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41353799.html&quot;,&quot;lights&quot;:0,&quot;replies&quot;:1,&quot;read&quot;:408,&quot;createdAt&quot;:1614231472000,&quot;createdAtFormat&quot;:&quot;2小时前&quot;,&quot;repliedAt&quot;:1614231472000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;88666496&quot;,&quot;puname&quot;:&quot;虎扑JR2133430340&quot;,&quot;url&quot;:&quot;https://my.hupu.com/189153266235610&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}},{&quot;tid&quot;:&quot;41352970&quot;,&quot;title&quot;:&quot;求女健身教练帮男学员做动作的gif&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41352970.html&quot;,&quot;lights&quot;:0,&quot;replies&quot;:0,&quot;read&quot;:373,&quot;createdAt&quot;:1614230024000,&quot;createdAtFormat&quot;:&quot;2小时前&quot;,&quot;repliedAt&quot;:1614230024000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;31741269&quot;,&quot;puname&quot;:&quot;我是例外&quot;,&quot;url&quot;:&quot;https://my.hupu.com/131521815888512&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}},{&quot;tid&quot;:&quot;41350459&quot;,&quot;title&quot;:&quot;关于类固醇，形体和力型兼备的浅谈&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41350459.html&quot;,&quot;lights&quot;:3,&quot;replies&quot;:27,&quot;read&quot;:1666,&quot;createdAt&quot;:1614225571000,&quot;createdAtFormat&quot;:&quot;4小时前&quot;,&quot;repliedAt&quot;:1614237582000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;35970035&quot;,&quot;puname&quot;:&quot;语虚小辫&quot;,&quot;url&quot;:&quot;https://my.hupu.com/169559422595174&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}},{&quot;tid&quot;:&quot;41350383&quot;,&quot;title&quot;:&quot;某不知名艺人自称体脂率3%，什么水平&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41350383.html&quot;,&quot;lights&quot;:6,&quot;replies&quot;:30,&quot;read&quot;:7189,&quot;createdAt&quot;:1614225466000,&quot;createdAtFormat&quot;:&quot;4小时前&quot;,&quot;repliedAt&quot;:1614238579000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;19746205&quot;,&quot;puname&quot;:&quot;KB永love&quot;,&quot;url&quot;:&quot;https://my.hupu.com/195513224224315&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}},{&quot;tid&quot;:&quot;41349007&quot;,&quot;title&quot;:&quot;求问众多健身大神，蛋白粉可以怎么吃？&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41349007.html&quot;,&quot;lights&quot;:0,&quot;replies&quot;:3,&quot;read&quot;:872,&quot;createdAt&quot;:1614222830000,&quot;createdAtFormat&quot;:&quot;4小时前&quot;,&quot;repliedAt&quot;:1614225210000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;73376372&quot;,&quot;puname&quot;:&quot;XingA666&quot;,&quot;url&quot;:&quot;https://my.hupu.com/29942788844131&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}}],&quot;hot&quot;:[{&quot;tid&quot;:&quot;41356207&quot;,&quot;title&quot;:&quot;不懂就问，深蹲卧推硬拉算基础力量么？谁公认的？&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41356207.html&quot;,&quot;lights&quot;:0,&quot;replies&quot;:3,&quot;read&quot;:153,&quot;createdAt&quot;:1614237733000,&quot;createdAtFormat&quot;:&quot;41分钟前&quot;,&quot;repliedAt&quot;:1614239652000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;35970035&quot;,&quot;puname&quot;:&quot;语虚小辫&quot;,&quot;url&quot;:&quot;https://my.hupu.com/169559422595174&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}},{&quot;tid&quot;:&quot;41332943&quot;,&quot;title&quot;:&quot;增肌长痘痘该吃维生素几？&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41332943.html&quot;,&quot;lights&quot;:0,&quot;replies&quot;:8,&quot;read&quot;:3617,&quot;createdAt&quot;:1614151384000,&quot;createdAtFormat&quot;:&quot;1天前&quot;,&quot;repliedAt&quot;:1614239564000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;18948801&quot;,&quot;puname&quot;:&quot;莫言默行&quot;,&quot;url&quot;:&quot;https://my.hupu.com/17437371679037&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}},{&quot;tid&quot;:&quot;41329306&quot;,&quot;title&quot;:&quot;如果想攒钱，选健身行业发展明智吗&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41329306.html&quot;,&quot;lights&quot;:3,&quot;replies&quot;:42,&quot;read&quot;:10488,&quot;createdAt&quot;:1614140380000,&quot;createdAtFormat&quot;:&quot;1天前&quot;,&quot;repliedAt&quot;:1614239443000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;87302530&quot;,&quot;puname&quot;:&quot;男生宿舍宿管阿姨&quot;,&quot;url&quot;:&quot;https://my.hupu.com/179374186951234&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}},{&quot;tid&quot;:&quot;41324269&quot;,&quot;title&quot;:&quot;这波和健身房怎么对线？求指导&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41324269.html&quot;,&quot;lights&quot;:3,&quot;replies&quot;:24,&quot;read&quot;:9158,&quot;createdAt&quot;:1614129490000,&quot;createdAtFormat&quot;:&quot;1天前&quot;,&quot;repliedAt&quot;:1614239145000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;16832149&quot;,&quot;puname&quot;:&quot;八十&quot;,&quot;url&quot;:&quot;https://my.hupu.com/229678200725668&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}},{&quot;tid&quot;:&quot;41324344&quot;,&quot;title&quot;:&quot;求教体脂率降到多少适合开始增肌&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41324344.html&quot;,&quot;lights&quot;:0,&quot;replies&quot;:9,&quot;read&quot;:4496,&quot;createdAt&quot;:1614129736000,&quot;createdAtFormat&quot;:&quot;1天前&quot;,&quot;repliedAt&quot;:1614239005000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;28833971&quot;,&quot;puname&quot;:&quot;我本嘉宾鼓瑟吹笙&quot;,&quot;url&quot;:&quot;https://my.hupu.com/83289419328506&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}},{&quot;tid&quot;:&quot;41333338&quot;,&quot;title&quot;:&quot;刚刚用体脂秤的数据 &quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41333338.html&quot;,&quot;lights&quot;:0,&quot;replies&quot;:4,&quot;read&quot;:1951,&quot;createdAt&quot;:1614152833000,&quot;createdAtFormat&quot;:&quot;1天前&quot;,&quot;repliedAt&quot;:1614238884000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;39555461&quot;,&quot;puname&quot;:&quot;圣迭戈州大名宿kawhi&quot;,&quot;url&quot;:&quot;https://my.hupu.com/57151018982005&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}},{&quot;tid&quot;:&quot;41333399&quot;,&quot;title&quot;:&quot;赛级卧推143kg成功，迟来的幸福，继续努力吧&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41333399.html&quot;,&quot;lights&quot;:0,&quot;replies&quot;:32,&quot;read&quot;:3949,&quot;createdAt&quot;:1614153041000,&quot;createdAtFormat&quot;:&quot;1天前&quot;,&quot;repliedAt&quot;:1614238845000,&quot;hasVideo&quot;:true,&quot;author&quot;:{&quot;puid&quot;:&quot;35970035&quot;,&quot;puname&quot;:&quot;语虚小辫&quot;,&quot;url&quot;:&quot;https://my.hupu.com/169559422595174&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}},{&quot;tid&quot;:&quot;41339359&quot;,&quot;title&quot;:&quot;家人们肌肉科技增肌粉六磅这个价格会翻车吗&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41339359.html&quot;,&quot;lights&quot;:0,&quot;replies&quot;:12,&quot;read&quot;:12276,&quot;createdAt&quot;:1614171409000,&quot;createdAtFormat&quot;:&quot;19小时前&quot;,&quot;repliedAt&quot;:1614238825000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;41016022&quot;,&quot;puname&quot;:&quot;buleys&quot;,&quot;url&quot;:&quot;https://my.hupu.com/45231633684255&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}},{&quot;tid&quot;:&quot;41350383&quot;,&quot;title&quot;:&quot;某不知名艺人自称体脂率3%，什么水平&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41350383.html&quot;,&quot;lights&quot;:6,&quot;replies&quot;:30,&quot;read&quot;:7182,&quot;createdAt&quot;:1614225466000,&quot;createdAtFormat&quot;:&quot;4小时前&quot;,&quot;repliedAt&quot;:1614238579000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;19746205&quot;,&quot;puname&quot;:&quot;KB永love&quot;,&quot;url&quot;:&quot;https://my.hupu.com/195513224224315&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}},{&quot;tid&quot;:&quot;41354453&quot;,&quot;title&quot;:&quot;Hxdm，帮忙看下蛋白粉这个价格怎么样&quot;,&quot;cover&quot;:&quot;&quot;,&quot;url&quot;:&quot;/41354453.html&quot;,&quot;lights&quot;:0,&quot;replies&quot;:1,&quot;read&quot;:457,&quot;createdAt&quot;:1614232949000,&quot;createdAtFormat&quot;:&quot;2小时前&quot;,&quot;repliedAt&quot;:1614237720000,&quot;hasVideo&quot;:false,&quot;author&quot;:{&quot;puid&quot;:&quot;16261004&quot;,&quot;puname&quot;:&quot;被老婆骂后跪着说&quot;,&quot;url&quot;:&quot;https://my.hupu.com/35249022327551&quot;},&quot;topicId&quot;:&quot;23&quot;,&quot;topic&quot;:{&quot;topicId&quot;:&quot;23&quot;,&quot;name&quot;:&quot;健身区&quot;,&quot;url&quot;:&quot;/fit&quot;}}],&quot;isRecommended&quot;:false,&quot;title&quot;:&quot;开学前减肥20天-Day7 - 虎扑社区&quot;,&quot;keywords&quot;:&quot;开学前减肥20天-Day7&quot;,&quot;desc&quot;:&quot;痿掉的一天。没有运动，饮食随便吃了点也没记录。早上家里装宽带，乡下事情就是多，这里牵根线，那边人不同意，搞东搞西还没搞好，无语。&quot;},&quot;side&quot;:[{&quot;appid&quot;:&quot;tzsports&quot;,&quot;name&quot;:&quot;NBA英雄&quot;,&quot;link&quot;:&quot;https://fairy.mobileapi.hupu.com/game/pc?appid=tzsports&quot;,&quot;image&quot;:&quot;https://w3.hoopchina.com.cn/a9/db/35/a9db354ecf21387f5a6cd5e416f99207002.png&quot;},{&quot;appid&quot;:&quot;qiuwang&quot;,&quot;name&quot;:&quot;荣耀冠军&quot;,&quot;link&quot;:&quot;https://fairy.mobileapi.hupu.com/game/pc?appid=qiuwang&quot;,&quot;image&quot;:&quot;https://w3.hoopchina.com.cn//6b/bd/bc/6bbdbc5bba83b3374651a250949a0db4002.png&quot;},{&quot;appid&quot;:&quot;shzh&quot;,&quot;name&quot;:&quot;山海之痕&quot;,&quot;link&quot;:&quot;https://fairy.mobileapi.hupu.com/game/pc?appid=shzh&quot;,&quot;image&quot;:&quot;https://w4.hoopchina.com.cn//60/3b/dc/603bdccf8c5c945cfae505b4688c0af6002.png&quot;},{&quot;appid&quot;:&quot;nbazc&quot;,&quot;name&quot;:&quot;NBA范特西&quot;,&quot;link&quot;:&quot;https://fairy.mobileapi.hupu.com/gamecentre?appid=nbazc#/nba-journey&quot;,&quot;image&quot;:&quot;http://w2.hoopchina.com.cn/b5/34/ec/b534ecb6ee905f9e9f6a4d44cdbf3543001.jpg&quot;}],&quot;detailErrorInfo&quot;:{&quot;code&quot;:200,&quot;message&quot;:&quot;成功&quot;},&quot;menu&quot;:{&quot;nav&quot;:[{&quot;title&quot;:&quot;虎扑首页&quot;,&quot;href&quot;:&quot;https://www.hupu.com&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;NBA&quot;,&quot;href&quot;:&quot;https://nba.hupu.com&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;社区&quot;,&quot;href&quot;:&quot;https://bbs.hupu.com&quot;,&quot;active&quot;:false,&quot;children&quot;:[{&quot;title&quot;:&quot;社区首页&quot;,&quot;href&quot;:&quot;/&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;NBA版&quot;,&quot;href&quot;:&quot;/all-nba&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;电竞版&quot;,&quot;href&quot;:&quot;/all-gg&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;国际足球版&quot;,&quot;href&quot;:&quot;/all-soccer&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;中国足球版&quot;,&quot;href&quot;:&quot;/all-csl&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;影视娱乐版&quot;,&quot;href&quot;:&quot;/all-ent&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;装备版&quot;,&quot;href&quot;:&quot;/all-gear&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;CBA版&quot;,&quot;href&quot;:&quot;/all-cba&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;晒照片&quot;,&quot;href&quot;:&quot;/all-selfie&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;数码版&quot;,&quot;href&quot;:&quot;/all-digital&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;综合体育版&quot;,&quot;href&quot;:&quot;/all-sports&quot;,&quot;active&quot;:false,&quot;children&quot;:[]}]},{&quot;title&quot;:&quot;资讯&quot;,&quot;href&quot;:&quot;http://voice.hupu.com&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;步行街&quot;,&quot;href&quot;:&quot;https://bbs.hupu.com/all-gambia&quot;,&quot;active&quot;:true,&quot;children&quot;:[{&quot;title&quot;:&quot;步行街热帖&quot;,&quot;href&quot;:&quot;/all-gambia&quot;,&quot;active&quot;:true,&quot;children&quot;:[]},{&quot;title&quot;:&quot;步行街主干道&quot;,&quot;href&quot;:&quot;/bxj&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;步行街每日话题&quot;,&quot;href&quot;:&quot;/topic-daily&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;热点区&quot;,&quot;href&quot;:&quot;/highlight-topic&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;理性讨论区&quot;,&quot;href&quot;:&quot;/discuss&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;论美区&quot;,&quot;href&quot;:&quot;/vs-beauty&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;恋爱区&quot;,&quot;href&quot;:&quot;/love&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;不懂就问区&quot;,&quot;href&quot;:&quot;/question&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;职场区&quot;,&quot;href&quot;:&quot;/workplace&quot;,&quot;active&quot;:false,&quot;children&quot;:[]}]},{&quot;title&quot;:&quot;电竞&quot;,&quot;href&quot;:&quot;https://gg.hupu.com&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;国际足球&quot;,&quot;href&quot;:&quot;https://soccer.hupu.com&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;中国足球&quot;,&quot;href&quot;:&quot;https://soccer.hupu.com/china/&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;CBA&quot;,&quot;href&quot;:&quot;https://cba.hupu.com&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;直播&quot;,&quot;href&quot;:&quot;https://live-pc.liangle.com/&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;空间&quot;,&quot;href&quot;:&quot;https://my.hupu.com&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;路人王&quot;,&quot;href&quot;:&quot;http://www.liangle.com&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;识货&quot;,&quot;href&quot;:&quot;https://www.shihuo.cn&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;JRs Official&quot;,&quot;href&quot;:&quot;https://hupujrs.taobao.com/&quot;,&quot;active&quot;:false,&quot;children&quot;:[]},{&quot;title&quot;:&quot;NFL&quot;,&quot;href&quot;:&quot;http://www.nflchina.com&quot;,&quot;active&quot;:false,&quot;children&quot;:[]}],&quot;anchor&quot;:{&quot;id&quot;:&quot;41317167&quot;,&quot;type&quot;:&quot;THREAD&quot;,&quot;parent&quot;:{&quot;id&quot;:&quot;23&quot;,&quot;type&quot;:&quot;TOPIC&quot;,&quot;parent&quot;:{&quot;id&quot;:&quot;1&quot;,&quot;type&quot;:&quot;CATEGORY&quot;,&quot;parent&quot;:{}}}}},&quot;env&quot;:&quot;prod&quot;,&quot;euid&quot;:&quot;&quot;}";
        String s1 = StringEscapeUtils.unescapeHtml(s);
        Selectable selectable = new Json(s1).jsonPath($_type + "..detail.thread");
        System.out.println(s1);
    }
}
