package com.chance.cc.crawler.development.scripts.cctv;

import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.LabelTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.core.tags.crawler.CrawlerResultTags;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.article;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.interaction;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;

/**
 * Crawler script for CCTV (cctv.com, 央视网).
 *
 * <p>Crawl flow: the site entrance page fans out to channel ("module") pages;
 * each module page exposes a JSONP data interface that lists articles; keyword
 * search pages are paged through; article detail pages are washed into
 * {@code article} data, and the praise ("like") API is washed into
 * {@code interaction} data keyed to its parent article.
 *
 * @author Zhao.Hhuan
 * @since 2020/11/19
 **/
public class CCTVCrawlerScript extends CrawlerCommonScript {

    private static final Logger log = LoggerFactory.getLogger(CCTVCrawlerScript.class);

    /** Business-tag key counting how many times a failed download was retried. */
    private static final String REQUEST_AGAIN_TAG = "cctv_request_again";
    /** Extras key holding the keyword-search URL template (with a %s placeholder). */
    private static final String SEARCH_KW_SOURCE_URL = "searchKwSourceUrl";

    private static final String ENTRANCE_URL = "https://www.cctv.com/";
    private static final String MODULE_ENTRANCE_URL = "https://\\S*.cctv.com\\S*#";
    private static final String MODULE_URL = "https://\\S*.cctv.com/[a-zA-Z\\/\\.]*";
    private static final String MODULE_JSON_URL = "https://\\S*.cctv.com/2019/07/gaiban/cmsdatainterface/page/\\S*_\\d+.jsonp";

    private static final String URL_PREFIX = "https://search.cctv.com/";
    private static final String TURN_PAGE_URL = "https://search.cctv.com/search.php\\S*";

    private static final String ITEM_URL = "http[s]*://\\S*.cctv.com/\\d{4}/\\d{2}/\\d{2}/\\S*.shtml";
    private static final String INTERACTION_SOURCE_URL = "https://common.itv.cntv.cn/praise/get?type=other&id=%s";
    private static final String INTERACTION_URL = "https://common.itv.cntv.cn/praise/get\\S*";

    /**
     * Domain identifier of this script.
     *
     * @return the fixed domain name {@code "cctv"}
     */
    @Override
    public String domain() {
        return "cctv";
    }

    /**
     * Registers the URL regular expressions that route requests into this script.
     */
    @Override
    public void initUrlRegulars() {
        addUrlRegular(ENTRANCE_URL);
        addUrlRegular(MODULE_ENTRANCE_URL);
        addUrlRegular(MODULE_URL);
        addUrlRegular(MODULE_JSON_URL);
        addUrlRegular(TURN_PAGE_URL);
        addUrlRegular(ITEM_URL);
        addUrlRegular(INTERACTION_URL);
    }

    /**
     * Input-data check before the script executes; this script accepts everything.
     *
     * @param crawlerRequestRecord the incoming request record
     * @return always {@code true}
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        return true;
    }

    /**
     * Prepares the initial request set. Support-source records whose URL contains
     * {@code "keys"} are treated as keyword lists and expanded into search requests.
     * A random User-Agent header is attached to the main request.
     *
     * @param requestRecord        the main seed request
     * @param supportSourceRecords auxiliary records (e.g. keyword sources)
     * @return the seed crawler records derived from the keyword sources
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> crawlerRecords = new ArrayList<>();

        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
            String url = supportSourceRecord.getHttpRequest().getUrl();
            if (url.contains("keys")) {
                initKeyword(requestRecord, supportSourceRecord, crawlerRecords);
            }
        }

        requestRecord.getHttpRequest().addHeader("User-Agent", getRandomUA());
        return crawlerRecords;
    }

    /**
     * Dispatches a downloaded page to the matching link-extraction handler based
     * on which URL pattern the request URL matches. Failed downloads are retried
     * via {@link #requestAgainCrawlerRecord}; photo pages are skipped.
     *
     * @param crawlerRequestRecord the request that produced this page
     * @param httpPage             the downloaded page
     * @return follow-up request records extracted from the page
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>();
        String requestUrl = crawlerRequestRecord.getHttpRequest().getUrl();

        if (!httpPage.isDownloadSuccess() || httpPage.getStatusCode() != 200) {
            log.error("{} status code : [{}]", requestUrl, httpPage.getStatusCode());
            requestAgainCrawlerRecord(parsedLinks, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }

        // Photo galleries carry no extractable article text.
        if (requestUrl.contains("photo")) {
            log.error("photo url [{}] need not to crawler!", requestUrl);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }

        // A URL may match several patterns, so each handler runs independently.
        if (requestUrl.matches(ENTRANCE_URL)) {
            entranceUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (requestUrl.matches(MODULE_ENTRANCE_URL)) {
            moduleEntranceUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (requestUrl.matches(MODULE_URL)) {
            moduleUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (requestUrl.matches(TURN_PAGE_URL)) {
            turnPageUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (requestUrl.matches(MODULE_JSON_URL)) {
            moduleJsonUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (requestUrl.matches(ITEM_URL)) {
            itemUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (requestUrl.matches(INTERACTION_URL)) {
            interactionUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        return parsedLinks;
    }

    /**
     * Entrance page: extracts the top navigation links and schedules each channel
     * page. A trailing "#" is appended so the URL matches {@code MODULE_ENTRANCE_URL}.
     */
    private void entranceUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        List<String> all = httpPage.getHtml().xpath("//div[@class=\"nav_list\"]/div[@class=\"navli\"]/span/a/@href").all();
        for (String modelLink : all) {
            CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(modelLink + "#")
                    .releaseTime(System.currentTimeMillis())
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .build();

            parsedLinks.add(itemRecord);
        }
    }

    /**
     * Channel entrance page: extracts sub-channel links from the left navigation
     * and schedules them as module pages.
     */
    private void moduleEntranceUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        List<String> all = httpPage.getHtml().xpath("//div[@class=\"nav_list\"]/div[@class=\"left\"]//a/@href").all();
        for (String url : all) {
            CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(url)
                    .releaseTime(System.currentTimeMillis())
                    .notFilterRecord()
                    .copyResultTags()
                    .copyBizTags()
                    .build();
            parsedLinks.add(record);
        }
    }

    /**
     * Module page: scrapes the embedded {@code jsonpurl="..."} attribute from the
     * raw HTML and schedules the protocol-relative JSONP data URL it points to.
     */
    private void moduleUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String requestUrl = httpPage.getRequest().getUrl();
        String url = getString("jsonpurl=\"\\S*\"", httpPage.getRawText());
        if (StringUtils.isBlank(url)) {
            log.error("can not get json url [{}]", requestUrl);
            crawlerRequestRecord.setNeedWashPage(false);
            return;
        }
        // Strip the attribute wrapper ("jsonpurl=\"...\"") and re-add the scheme.
        url = "https:" + url.substring(url.indexOf("\"") + 1, url.lastIndexOf("\""));
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(url)
                .releaseTime(System.currentTimeMillis())
                .notFilterRecord()
                .copyBizTags()
                .copyResultTags()
                .build();
        parsedLinks.add(record);
    }

    /**
     * Keyword-search result page: validates the first page, schedules the next
     * page if present, and schedules every article hit with its release time.
     */
    private void turnPageUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String keyword = (String) httpPage.getRequest().getExtras().get("keyword");
        String requestUrl = httpPage.getRequest().getUrl();
        if (requestUrl.contains("page=1")) {
            String s = httpPage.getHtml().xpath("//h3[@class=\"tit\"]/text()").get();
            if (StringUtils.isBlank(s)) {
                // Missing result banner usually means an incomplete download; retry.
                requestAgainCrawlerRecord(parsedLinks, crawlerRequestRecord);
                crawlerRequestRecord.setNeedWashPage(false);
                return;
            }
            // Banner mentioning the keyword is treated as a no-result page
            // (presumably the "no results for X" notice) — stop here.
            // Fix: guard against a missing "keyword" extra (was an unguarded NPE).
            if (keyword != null && s.contains(keyword)) {
                log.error(s);
                crawlerRequestRecord.setNeedWashPage(false);
                return;
            }
        }
        // Pagination: follow the "next page" link if one exists.
        String nextUrl = httpPage.getHtml().xpath("//a[@class=\"page-next\"]/@href").get();
        if (StringUtils.isNotEmpty(nextUrl)) {
            nextUrl = URL_PREFIX + StringEscapeUtils.unescapeHtml(nextUrl);
            CrawlerRequestRecord turnRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(nextUrl)
                    .copyBizTags()
                    .copyResultTags()
                    .releaseTime(System.currentTimeMillis())
                    .build();
            turnRecord.getHttpRequest().setExtras(crawlerRequestRecord.getHttpRequest().getExtras());
            parsedLinks.add(turnRecord);
        }

        // Article detail hits.
        List<Selectable> nodes = httpPage.getHtml().xpath("//div[@class=\"tright\"]").nodes();
        for (Selectable node : nodes) {
            String itemUrl = node.xpath("./h3/span/@lanmu1").get();
            if (StringUtils.isEmpty(itemUrl)) {
                continue;
            }

            String releaseTime = node.xpath(".//span[@class=\"tim\"]").get();
            if (StringUtils.isEmpty(releaseTime)) {
                continue;
            }
            // Drop the "发布时间：" label before the timestamp.
            releaseTime = releaseTime.substring(releaseTime.indexOf("：") + 1);

            try {
                long releaseTimeToLong = DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime();
                CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(itemUrl)
                        // Key includes the keyword so the same article found via
                        // different keywords is crawled per keyword.
                        .recordKey(itemUrl + keyword)
                        .releaseTime(releaseTimeToLong)
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                parsedLinks.add(itemRecord);
            } catch (ParseException e) {
                log.error(e.getMessage());
            }
        }
    }

    /**
     * Module JSONP page: schedules the next page of the data interface and an
     * item request for every article in the current page's list.
     */
    private void moduleJsonUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String requestUrl = httpPage.getRequest().getUrl();
        // Pagination: ".../name_3.jsonp" -> ".../name_4.jsonp".
        // Fix: the original used split("_")[0], which truncates the URL when the
        // path itself contains an underscore; use lastIndexOf instead.
        int underscore = requestUrl.lastIndexOf("_");
        int page = Integer.parseInt(requestUrl.substring(underscore + 1, requestUrl.lastIndexOf(".")));
        String nextUrl = requestUrl.substring(0, underscore) + "_" + (page + 1) + ".jsonp";
        CrawlerRequestRecord turnRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(nextUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        parsedLinks.add(turnRecord);

        // Article list: the JSONP padding is the file base name ("name" above).
        String remove = requestUrl.substring(requestUrl.lastIndexOf("/") + 1, requestUrl.lastIndexOf("_"));
        List<String> all = httpPage.getJson().removePadding(remove).jsonPath($_type + ".data.list").all();
        for (String data : all) {
            JSONObject jsonObject = JSONObject.parseObject(data);
            String itemUrl = jsonObject.getString("url");
            String releaseTime = jsonObject.getString("focus_date");
            try {
                long releaseTimeToLong = DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime();
                CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(itemUrl)
                        .releaseTime(releaseTimeToLong)
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                parsedLinks.add(itemRecord);
            } catch (ParseException e) {
                log.error(e.getMessage());
            }
        }
    }

    /**
     * Article detail page: when interaction data is requested, schedules the
     * praise-API URL for this article and removes the interaction tag from the
     * article record so the article page itself is not washed as interaction.
     */
    private void itemUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String requestUrl = httpPage.getRequest().getUrl();
        CrawlerResultTags resultTags = crawlerRequestRecord.tagsCreator().resultTags();
        if (resultTags.hasDataType(interaction)) {
            // ".../ARTIxxxx.shtml" -> "ARTIxxxx", which is also the praise API id.
            String id = requestUrl.substring(requestUrl.lastIndexOf("/") + 1, requestUrl.lastIndexOf("."));
            String url = String.format(INTERACTION_SOURCE_URL, id);

            CrawlerRequestRecord interactionRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRequestRecord)
                    .httpUrl(url)
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .copyBizTags()
                    .resultLabelTag(interaction)
                    .build();
            parsedLinks.add(interactionRecord);
            crawlerRequestRecord.tagsCreator().resultTags().getCategoryTag().getLabelTags().remove("interaction");
        }
    }

    /**
     * Praise-API response: probes for the like count; if the JSON cannot be
     * parsed, the download is retried and the page is not washed.
     */
    private void interactionUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        try {
            // Probe only; the value itself is extracted later in washInteraction.
            httpPage.getJson().jsonPath($_type + ".data.num").get();
        } catch (Exception e) {
            log.error("interaction page [{}] parse failed : {}", httpPage.getRequest().getUrl(), e.getMessage());
            requestAgainCrawlerRecord(parsedLinks, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
        }
    }

    /**
     * Washes a page into result data according to its result tags: article
     * pages, interaction (like-count) responses, or both.
     *
     * @param crawlerRecord the request record of the washed page
     * @param page          the downloaded page
     * @return the extracted data items (may contain a {@code null} entry when
     *         article parsing fails, matching the original contract)
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        CrawlerResultTags crawlerResultTags = crawlerRecord.tagsCreator().resultTags();

        if (crawlerResultTags.hasDataType(article)) {
            crawlerDataList.add(washArticle(crawlerRecord, page));
        }

        if (crawlerResultTags.hasDataType(interaction)) {
            crawlerDataList.add(washInteraction(crawlerRecord, page));
        }

        return crawlerDataList;
    }

    /**
     * Extracts an article (title, source, release time, body) from a detail page.
     * Several page templates are tried for each field.
     *
     * @param crawlerRequestRecord the article request record
     * @param httpPage             the downloaded article page
     * @return the article data, or {@code null} when the release time fails to parse
     */
    public CrawlerData washArticle(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        String itemUrl = httpPage.getRequest().getUrl();
        // ".../2020/11/19/ARTIxxxx.shtml" -> "ARTIxxxx"
        String articleKey = itemUrl.substring(itemUrl.lastIndexOf("/") + 1, itemUrl.lastIndexOf("."));

        List<String> titles = httpPage.getHtml().xpath("//h1/text()|//h2[@align=\"center\"]/text()").all();
        if (titles.isEmpty()) {
            titles = httpPage.getHtml().xpath("//div[@class=\"title\"]/text()").all();
        }
        String title = "";
        if (titles.size() == 1) {
            title = titles.get(0);
        } else if (titles.size() == 3) {
            // Some templates repeat the title; the middle occurrence is the real one.
            title = titles.get(1);
        }
        String source = httpPage.getHtml().xpath("//span[@class=\"info\"]/i/a/text()|//div[@class=\"info\"]/a/text()").get();
        String releaseTime = "";

        if (StringUtils.isEmpty(source)) {
            source = httpPage.getHtml().xpath("//div[@class=\"info\"]/text()|//div[@class=\"info\"]/i/text()|//span[@class=\"info\"]/i/text()").get();
            if (StringUtils.isEmpty(source)) {
                source = httpPage.getHtml().xpath("//div[@class=\"biref\"]/span/text()").get();
                if (StringUtils.isEmpty(source)) {
                    source = "";
                } else {
                    // Fix: the original substring(0, indexOf("  ")) threw
                    // StringIndexOutOfBoundsException when no double space existed.
                    int doubleSpace = source.indexOf("  ");
                    if (doubleSpace >= 0) {
                        source = source.substring(0, doubleSpace);
                    }
                }
            }
        }
        if (source.contains("|")) {
            source = source.substring(0, source.indexOf("|"));
        }
        // Strip a "来源：" style prefix up to the first space. Replaces the original
        // try/catch-with-self-assignment: the substring is applied only when the
        // indexes are valid, which is exactly when the original did not throw.
        int colonIdx = source.indexOf("：");
        int spaceIdx = source.indexOf(" ");
        if (spaceIdx > colonIdx) {
            source = source.substring(colonIdx + 1, spaceIdx);
        }
        if (source.contains("：")) {
            source = source.substring(source.indexOf("：") + 1);
        }

        // Release time: first text node matching a date-time shape wins.
        List<String> all = httpPage.getHtml().xpath("//span[@class=\"info\"]//text()|//div[@class=\"info\"]/span/text()|//div[@class=\"info\"]/text()|//div[@class=\"biref\"]/span/text()").all();
        for (String data : all) {
            String string = getString("\\d{4}\\S*\\d+\\S*\\d+\\S* \\d{2}:\\d{2}[:\\d]*", data);
            if (StringUtils.isNotEmpty(string)) {
                releaseTime = string;
                break;
            }
        }

        List<String> articleTextList = httpPage.getHtml().xpath("//div[@class=\"cnt_bd\"]//p[not(contains(@style,'none'))]//text()|//div[contains(@id,'area')]//p//text()|//div[@class=\"cont\"]/p//text()|//div[@class=\"column_wrapper\"]//p//text()").all();
        StringBuilder contents = new StringBuilder();
        for (String articleText : articleTextList) {
            // Skip player placeholders and the duplicated "original title" line.
            if (articleText.contains("播放器容器id") || articleText.contains("原标题：")) {
                continue;
            }
            contents.append(articleText).append(" ");
        }

        CrawlerData crawlerData = null;
        try {
            long releaseTimeToLong = StringUtils.isNotEmpty(releaseTime) ? DateUtils.parseDate(releaseTime, "yyyy年MM月dd日 HH:mm", "yyyy-MM-dd HH:mm:ss").getTime() : crawlerRequestRecord.getReleaseTime();
            crawlerData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), CrawlerEnum.CrawlerDataType.article.enumVal(), articleKey))
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("article"))
                    .url(itemUrl)
                    .releaseTime(releaseTimeToLong)
                    .addContentKV(Field_Title, title)
                    .addContentKV(Field_Author, source)
                    .addContentKV(Field_Content, contents.toString().trim())
                    .build();
        } catch (ParseException e) {
            log.error(e.getMessage());
        }
        return crawlerData;
    }

    /**
     * Extracts the like count from a praise-API response and links it to its
     * parent article via the id carried in the request URL's query string.
     *
     * @param crawlerRequestRecord the interaction request record
     * @param httpPage             the downloaded JSON response
     * @return the interaction data
     */
    public CrawlerData washInteraction(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        String itemUrl = httpPage.getRequest().getUrl();
        // ".../praise/get?type=other&id=ARTIxxxx" -> "ARTIxxxx"
        String articleKey = itemUrl.substring(itemUrl.lastIndexOf("=") + 1);

        Json json = httpPage.getJson();
        String likes = json.jsonPath($_type + ".data.num").get();

        return CrawlerData.builder()
                .data(crawlerRequestRecord, httpPage)
                .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), CrawlerEnum.CrawlerDataType.article.enumVal(), articleKey))
                .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), interaction.enumVal(), articleKey))
                .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("interaction"))
                .url(itemUrl)
                .releaseTime(crawlerRequestRecord.getReleaseTime())
                .addContentKV(Field_I_Likes, likes)
                .build();
    }

    /** No post-execution work is needed for this script. */
    @Override
    public void afterExecute(CrawlerRecordContext crawlerRecordContext) {
        // intentionally empty
    }

    /**
     * Re-schedules a failed request, up to 5 attempts, tracking the attempt
     * count in the {@link #REQUEST_AGAIN_TAG} business tag. Item-page requests
     * are retried as item requests, everything else as turn-page requests.
     *
     * @param crawlerRequestRecords sink for the retry record
     * @param crawlerRecord         the failed request
     */
    private void requestAgainCrawlerRecord(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        int count = 0;
        if (crawlerBusinessTags.hasKVTag(REQUEST_AGAIN_TAG)) {
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag(REQUEST_AGAIN_TAG).getVal();
            if (count >= 5) {
                log.error("CCTV  download page the number of retries exceeds the limit" +
                        ",request url {}", crawlerRecord.getHttpRequest().getUrl());
                return;
            }
        }

        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        LabelTag type = crawlerRecord.tagsCreator().requestTags().getCategoryTag().getLabelTags().get("turn_page_item_request");
        CrawlerRequestRecord crawlerRequestRecord;
        if (type == null) {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    // recordKey varies with the attempt count so the retry is not
                    // deduplicated against the failed request.
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .notFilterRecord()
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        } else {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        }

        crawlerRequestRecord.getHttpRequest().setExtras(crawlerRecord.getHttpRequest().getExtras());
        crawlerRequestRecord.getHttpRequest().setHeaders(crawlerRecord.getHttpRequest().getHeaders());
        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(REQUEST_AGAIN_TAG, ++count);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    /**
     * Expands a keyword-source response into one search request per keyword,
     * formatting each keyword (URL-encoded) into the search URL template held
     * in the {@link #SEARCH_KW_SOURCE_URL} extra.
     *
     * @param requestRecord       the main seed request carrying the URL template
     * @param supportSourceRecord the record whose internal page lists the keywords
     * @param crawlerRecords      sink for the generated search requests
     */
    private void initKeyword(CrawlerRequestRecord requestRecord, CrawlerRequestRecord supportSourceRecord, List<CrawlerRecord> crawlerRecords) {
        HttpPage internalDownloadPage = supportSourceRecord.getInternalDownloadPage();
        Map<String, Object> extras = requestRecord.getHttpRequest().getExtras();
        String searchSourceUrl = (String) extras.get(SEARCH_KW_SOURCE_URL);
        if (StringUtils.isBlank(searchSourceUrl)) {
            log.error("search kw source url can not null!");
            return;
        }
        Json json = internalDownloadPage.getJson();
        String msg = json.jsonPath($_type + ".msg").get();
        if (!"success".equals(msg)) {
            log.error("keyword page [{}] download error!", internalDownloadPage.getRequest().getUrl());
            return;
        }

        List<String> all = json.jsonPath($_type + ".content").all();
        for (String data : all) {
            JSONObject jsonObject = JSONObject.parseObject(data);
            String keyword = jsonObject.getString("keyword");

            String itemUrl = null;
            try {
                itemUrl = String.format(searchSourceUrl, URLEncoder.encode(keyword, "UTF-8"));
            } catch (UnsupportedEncodingException e) {
                // Cannot happen for UTF-8, but the API forces the checked exception.
                log.error(e.getMessage());
            }
            CrawlerRequestRecord turnRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(requestRecord)
                    .httpUrl(itemUrl)
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .releaseTime(System.currentTimeMillis())
                    .build();
            turnRecord.tagsCreator().bizTags().addKeywords(keyword);
            turnRecord.getHttpRequest().addExtra("keyword", keyword);
            crawlerRecords.add(turnRecord);
        }
    }

    /**
     * Returns the first substring of {@code input} matching {@code regx},
     * or {@code null} when there is no match.
     *
     * @param regx  the regular expression to search for
     * @param input the text to search
     * @return the first match, or {@code null}
     */
    private static String getString(String regx, String input) {
        // Fix: the original used "while (matcher.find()) return ..." — an if in disguise.
        Matcher matcher = Pattern.compile(regx).matcher(input);
        return matcher.find() ? matcher.group(0) : null;
    }

    /** Pool of User-Agent strings used to randomize outgoing requests. */
    private static final List<String> agentList = new ArrayList<>();

    static {
        agentList.add("Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)");
        agentList.add("Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.28.3 (KHTML, like Gecko) Version/3.2.3 ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/525.28.3");
        agentList.add("Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16");
        agentList.add("Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14");
        agentList.add("Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14");
        agentList.add("Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14");
        agentList.add("Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02");
        agentList.add("Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0");
        agentList.add("Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.13 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4043.400");
    }

    /**
     * Picks a random User-Agent from the pool.
     * Fix: the original passed {@code size() - 1} as the exclusive upper bound of
     * {@code RandomUtils.nextInt}, so the last entry could never be chosen.
     *
     * @return a randomly selected User-Agent string
     */
    private static String getRandomUA() {
        return agentList.get(RandomUtils.nextInt(0, agentList.size()));
    }

}
