package com.chance.cc.crawler.development.scripts.sohu.article;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.development.scripts.allfeild.AICCommonField;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.interaction;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;

/**
 * Crawler script for articles on Sohu's auto channel ({@code db.auto.sohu.com}):
 * walks series pages -> plate list pages -> article pages, and pulls comments and
 * interaction counts via the comment API.
 *
 * @ClassName song
 * @Description TODO
 * @Author ding
 * @Date 2021/9/2 14:01
 * @Version 1.0
 **/
public class SoHuArticleCrawlerScript extends CrawlerCommonScript {
    private static Logger log = LoggerFactory.getLogger(SoHuArticleCrawlerScript.class);
    // Crawler identity: the domain/site pair this script answers for (see crawlerCheck/domain()).
    private static final String domain = "sohu";
    private static final String site = "article";
    // Business-tag keys: retry counter and comment filtering marker.
    private static final String RECORD_AGAIN_REQUEST = "record_again_request";
    private static final String commentFilter = "comment_record_filter_info";
    // Scheme prefix prepended to protocol-relative hrefs scraped from pages.
    private static final String HTTPS = "https:";
    // Seed URL prefix; a keyword is appended to it in prepareRequest().
    private static final String URL = "http://db.auto.sohu.com/";

    // URL classification regexes, matched against request URLs in parseLinks()/washPage().
    private static final String seriesUrl = "http://db.auto.sohu.com/\\S*/\\S*/";
    private static final String plateListUrl = "http://db.auto.sohu.com/\\S*/\\S*/news_\\S*/page_\\S*.html";
    private static final String plateListUrlS = "https://db.auto.sohu.com/\\S*/\\S*/news_\\S*/page_\\S*.html";
    private static final String articleUrl = "https://www.sohu.com/\\S*/\\S*";

    // Comment API: commentUrlType is the regex form for matching, commentUrl the String.format template.
    private static final String commentUrlType = "https://db.auto.sohu.com/api/comment/list\\?newsId=\\S*&pageSize=20&pageNo=\\S*&businessType=article";
    private static final String commentUrl = "https://db.auto.sohu.com/api/comment/list?newsId=%s&pageSize=20&pageNo=%s&businessType=article";

    /**
     * Registers every URL pattern this script is willing to crawl.
     */
    @Override
    public void initUrlRegulars() {
        String[] patterns = {URL, seriesUrl, plateListUrl, plateListUrlS, articleUrl, commentUrlType};
        for (String pattern : patterns) {
            addUrlRegular(pattern);
        }
    }

    /**
     * Expands keyword support records into seed requests: every keyword returned by the
     * keyword service is appended to {@link #URL} and queued as a new crawl request.
     *
     * @param requestRecord        originating request record, used as the parent of new records
     * @param supportSourceRecords auxiliary records; only those whose URL contains "keys" are used
     * @return seed records built from the keyword list (empty when nothing applies)
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> seeds = new ArrayList<>();
        if (supportSourceRecords == null) {
            return seeds;
        }
        for (CrawlerRequestRecord support : supportSourceRecords) {
            String supportUrl = support.getHttpRequest().getUrl();
            if (!supportUrl.contains("keys")) {
                continue;
            }
            HttpPage keywordPage = support.getInternalDownloadPage();
            Json json = keywordPage.getJson();
            String msg = json.jsonPath($_type + ".msg").get();
            if (!"success".equals(msg)) {
                log.error("keyword page [{}] download error!", keywordPage.getRequest().getUrl());
                return seeds;
            }
            for (String entry : json.jsonPath($_type + ".content").all()) {
                String keyword = JSONObject.parseObject(entry).getString("keyword");
                seeds.add(CrawlerRequestRecord.builder()
                        .itemPageRequest(requestRecord)
                        .httpUrl(URL + keyword)
                        .releaseTime(System.currentTimeMillis())
                        .copyResultTags()
                        .copyBizTags()
                        .build());
            }
        }
        return seeds;
    }

    /**
     * Extracts follow-up requests from a downloaded page, dispatching on the URL shape.
     * Failed downloads (non-200 and not a 404) are re-queued for retry and not washed.
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> parseLinks = new ArrayList<>();
        if (page.getStatusCode() != 200 ||  !page.isDownloadSuccess()){
            // FIX: the original mixed an SLF4J "{}" placeholder with string concatenation,
            // producing a garbled message; use parameterized logging instead.
            log.error("page download failed, statusCode = {}", page.getStatusCode());
            if (page.getStatusCode() != 404){
                recordAgainRequest(crawlerRecord,parseLinks);
                crawlerRecord.setNeedWashPage(false);
                return  parseLinks;
            }
            // NOTE(review): a 404 falls through and the dead page is still parsed below — confirm intended.
        }
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (url.matches(seriesUrl)){
            this.parsePlate(crawlerRecord,page,parseLinks);
        }
        if (url.matches(plateListUrl) || url.matches(plateListUrlS)){
            this.parseArticleUrl(crawlerRecord,page,parseLinks);
        }
        if (url.matches(articleUrl)){
            this.getCommentUrl(crawlerRecord,page,parseLinks);
        }
        if (url.matches(commentUrlType)){
            this.turnPageCommentUrl(crawlerRecord,page,parseLinks);
        }
        return parseLinks;
    }

    /**
     * Queues the next comment-API page when the current page still contains comments.
     * The current page number and the article id are read from the record's business tags.
     */
    private void turnPageCommentUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        KVTag pageNo = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("pageNo");
        Integer currentPage = (Integer) pageNo.getVal();
        String articleId = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("articleId");
        String data = page.getJson().jsonPath($_type + ".data").get();
        JSONObject jsonObject = JSONObject.parseObject(data);
        JSONArray comments = jsonObject.getJSONArray("comments");
        // FIX: getJSONArray returns null when the key is absent; the original NPE'd on size().
        if (comments == null || comments.isEmpty()){
            return;
        }
        String pageComment = String.format(commentUrl, articleId, currentPage + 1);
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(pageComment)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        record.getHttpRequest().addHeader("User-Agent",getRandomUA());
        // FIX: advance the pageNo tag on the new record. copyBizTags() carried the OLD page
        // number, so every subsequent call computed val+1 from the same stale value and the
        // crawler requested page 2 forever.
        record.tagsCreator().bizTags().addCustomKV("pageNo", currentPage + 1);
        parseLinks.add(record);
    }

    /**
     * Builds the first-page comment API request for an article page. The article id is
     * the segment before '_' in the last path element of the article URL.
     */
    private void getCommentUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String pageUrl = crawlerRecord.getHttpRequest().getUrl();
        String[] segments = pageUrl.split("/");
        String lastSegment = segments[segments.length - 1];
        String articleId = lastSegment.split("_")[0];
        int firstPage = 1;
        String firstPageUrl = String.format(commentUrl, articleId, firstPage);
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(firstPageUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        record.getHttpRequest().addHeader("User-Agent", getRandomUA());
        // Seed the pagination state consumed later by turnPageCommentUrl().
        record.tagsCreator().bizTags().addCustomKV("pageNo", firstPage);
        record.tagsCreator().bizTags().addCustomKV("articleId", articleId);
        parseLinks.add(record);
    }

    /**
     * Extracts article links (and the "next page" link) from a plate list page.
     * Articles older than the configured date range stop the scan, since the list
     * is ordered newest-first.
     */
    private void parseArticleUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String text = page.getHtml().xpath("//*[@id=\"txt_list\"]/li//text()").get();
        if (text == null){ // no list data on this page
            return;
        }
        List<Selectable> nodes = page.getHtml().xpath("//*[@id=\"txt_list\"]/li").nodes();
        for (Selectable node : nodes){
            // FIX: select the timestamp relative to the CURRENT <li>; the old absolute
            // xpath (//*[@id="txt_list"]/li[1]/em) read the first item's time for every article.
            String time = node.xpath("./em//text()").get();
            if (time == null){
                continue; // entry without a timestamp — cannot date-filter it
            }
            Long releaseTime;
            try {
                releaseTime = DateUtils.parseDate(this.getTime(time),"yyyy/MM/dd").getTime();
            } catch (ParseException e) {
                // FIX: skip the unparseable entry instead of passing a null release time
                // downstream (and log instead of printStackTrace).
                log.error("cannot parse article time [{}] on page {}", time, crawlerRecord.getHttpRequest().getUrl(), e);
                continue;
            }
            if (!isDateRange(crawlerRecord,releaseTime)){
                return; // list is date-ordered: everything after this is older
            }

            String href = node.xpath("./a/@href").get();
            if (href == null){
                continue;
            }
            String articleUrl = HTTPS + href;
            CrawlerRequestRecord articleRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(articleUrl)
                    .releaseTime(releaseTime)
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            articleRecord.getHttpRequest().addHeader("User-Agent",getRandomUA());
            parseLinks.add(articleRecord);
        }
        List<Selectable> turnNodes = page.getHtml().xpath("/html/body/div[9]/div[1]/div/div/ul/li").nodes();
        for (Selectable node : turnNodes){
            String content = node.xpath("./a/text()").get();
            // FIX: compare from the literal side — the anchor text can be null, which NPE'd before.
            if ("下一页".equals(content)){
                String nextHref = node.xpath("./a/@href").get();
                if (nextHref == null){
                    break;
                }
                String turnPageUrl = HTTPS + nextHref;
                CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRecord)
                        .httpUrl(turnPageUrl)
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                record.getHttpRequest().addHeader("User-Agent",getRandomUA());
                parseLinks.add(record);
                break;
            }
        }
    }




    /**
     * Builds the first list-page URL for each of the five plate sections of a series:
     * 1=buying guide, 2=car usage, 3=news, 4=car culture, 5=market.
     */
    private void parsePlate(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        // FIX: hoisted out of the loop — the base URL does not change between iterations.
        String baseUrl = crawlerRecord.getHttpRequest().getUrl();
        for (int plate = 1; plate <= 5; plate++) {
            String url = baseUrl + "news_" + plate + "/page_1.html";
            CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .releaseTime(System.currentTimeMillis())
                    .httpUrl(url)
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            record.getHttpRequest().addHeader("User-Agent",getRandomUA());
            parseLinks.add(record);
        }
    }


    /**
     * Washes a downloaded page into result data, dispatching on the requested data
     * types and on the URL shape (article page vs. comment API response).
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> results = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        boolean isArticlePage = url.matches(articleUrl);
        if (isArticlePage && crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.article)) {
            this.washArticle(crawlerRecord, page, results);
        }
        if (isArticlePage && crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.interaction)) {
            this.washInteraction(crawlerRecord, page, results);
        }
        if (url.matches(commentUrlType) && crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.comment)) {
            this.washCommentUrl(crawlerRecord, page, results);
        }
        return results;
    }

    /**
     * Washes one page of the comment API into comment data records, plus an interaction
     * record (likes/replies) per comment when the interaction data type is requested.
     */
    private void washCommentUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> listCrawlerData) {
        String articleId = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("articleId");
        String s = page.getJson().jsonPath($_type + ".data").get();
        JSONObject jsonObject = JSONObject.parseObject(s);
        JSONArray comments = jsonObject.getJSONArray("comments");
        // FIX: null-guard — getJSONArray returns null when the key is absent.
        if (comments == null || comments.isEmpty()) {
            return;
        }
        for (Object object : comments) {
            JSONObject commentObj = (JSONObject) object;
            String commentId = commentObj.getString("id");
            String time = commentObj.getString("datetime");
            // FIX: Long.valueOf replaces the deprecated `new Long(...)` constructor.
            // NOTE(review): assumes "datetime" is epoch millis — confirm against the API.
            Long releaseTime = Long.valueOf(time);
            if(!isDateRange(crawlerRecord, releaseTime)){
                continue;
            }
            String content = commentObj.getString("content");
            String author = commentObj.getString("userName");
            String authorId = commentObj.getString("userId");
            String likes = commentObj.getString("likeCount");
            String commentSize = commentObj.getString("replyCount");

            CrawlerData crawlerCData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    // FIX: the original stored the regex constant `articleUrl`
                    // ("https://www.sohu.com/\\S*/\\S*") as the record URL; use the actual
                    // request URL. TODO confirm whether the article page URL is preferred here.
                    .url(crawlerRecord.getHttpRequest().getUrl())
                    .releaseTime(releaseTime)//sohu-comment
                    .dataId(StringUtils.joinWith("-", domain(), comment, commentId))
                    .parentId(StringUtils.joinWith("-", domain(), article, articleId))
                    .addContentKV(Field_Content, content)
                    .addContentKV(Field_Author, author)
                    .addContentKV(Field_Author_Id, authorId)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                    .resultLabelTag(comment)
                    .build();
            listCrawlerData.add(crawlerCData);

            if (crawlerRecord.tagsCreator().resultTags().hasDataType(interaction)) {
                CrawlerData crawlerIData = CrawlerData.builder()
                        .data(crawlerRecord, page)
                        .url(crawlerRecord.getHttpRequest().getUrl())
                        .releaseTime(releaseTime)
                        .dataId(StringUtils.joinWith("-", domain(), interaction, commentId))
                        .parentId(StringUtils.joinWith("-", domain(), comment, commentId))
                        .addContentKV(Field_I_Likes, likes)
                        .addContentKV(Field_I_Comments, commentSize)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                        .resultLabelTag(interaction)
                        .build();
                listCrawlerData.add(crawlerIData);
            }
        }
    }
    /**
     * Washes an article page into an interaction record (view count + comment count).
     * Bails out when the article body is absent (page not rendered / removed).
     */
    private void washInteraction(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> listCrawlerData) {
        String isText = page.getHtml().xpath("/html/body/div[@class=\"content area\"]/div[1]/article//text()").get();
        if (isText == null){
            return;
        }
        String url = crawlerRecord.getHttpRequest().getUrl();
        String[] split = url.split("/");
        String dataId = split[split.length-1];
        String[] split1 = dataId.split("_");
        dataId = split1[0];
        String comment = page.getHtml().xpath("//*[@id=\"autoComment\"]/div[1]/div[2]/span[2]/text()").get();
        String views = page.getHtml().xpath("/html/body/div[5]/div[1]/div[1]/div/div[1]/text()").get();
        // FIX: the views node can be missing; Pattern.matcher(null) previously threw NPE.
        String extracted = null;
        if (views != null) {
            Matcher mt = Pattern.compile("\\(.*\\)").matcher(views);
            while (mt.find()) {
                extracted = mt.group(); // keep the LAST "(...)" group, mirroring getTime()
            }
        }
        views = extracted;
        CrawlerData crawlerData = CrawlerData.builder()
                .data(crawlerRecord,page)
                .dataId(StringUtils.joinWith("-",crawlerRecord.getDomain(),CrawlerEnum.CrawlerDataType.interaction,dataId))
                .parentId(StringUtils.joinWith("-",crawlerRecord.getDomain(),CrawlerEnum.CrawlerDataType.article,dataId))
                .url(crawlerRecord.getHttpRequest().getUrl())
                .releaseTime(crawlerRecord.getReleaseTime())
                .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                .addContentKV(AICCommonField.Field_I_Views,views)
                .addContentKV(AICCommonField.Field_I_Comments,comment)
                .flowInPipelineTag("kafka")
                .build();
        listCrawlerData.add(crawlerData);
    }

    /**
     * Washes an article page into an article data record (title, author, body text,
     * images). Bails out when the article body is absent.
     */
    private void washArticle(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> listCrawlerData) {
        String isText = page.getHtml().xpath("/html/body/div[@class=\"content area\"]/div[1]/article//text()").get();
        if (isText == null){
            return;
        }
        // FIX: StringBuilder instead of StringBuffer — no cross-thread sharing here, so the
        // synchronized StringBuffer adds cost without benefit.
        StringBuilder content = new StringBuilder();
        StringBuilder images = new StringBuilder();
        List<Selectable> nodes = page.getHtml().xpath("/html/body/div[@class=\"content area\"]/div[1]/article/p").nodes();
        for (Selectable node : nodes){
            List<String> contentAll = node.xpath(".//text()").all();
            for (String s : contentAll){
                content.append(s);
            }
            String img = node.xpath("./img/@src").get();
            if (img != null){
                images.append(img).append("\\x01"); // literal \x01 separator expected downstream
            }
        }
        String url = crawlerRecord.getHttpRequest().getUrl();
        String[] split = url.split("/");
        String dataId = split[split.length-1];
        String[] split1 = dataId.split("_");
        dataId = split1[0];
        String title = page.getHtml().xpath("/html/body/div[@class=\"content area\"]/div[1]/h3/text()").get();
        String authorUrl = page.getHtml().xpath("/html/body/div[@class=\"content area\"]/div[2]/div[3]/div[1]/div[1]/a[@target=\"_blank\"][2]/@href").get();
        String authorName = page.getHtml().xpath("/html/body/div[@class=\"content area\"]/div[2]/div[3]/div[1]/div[1]/a[@target=\"_blank\"][2]/text()").get();
        CrawlerData crawlerData = CrawlerData.builder()
                .data(crawlerRecord,page)
                .dataId(StringUtils.joinWith("-",crawlerRecord.getDomain(),CrawlerEnum.CrawlerDataType.article,dataId))
                .url(crawlerRecord.getHttpRequest().getUrl())
                .releaseTime(crawlerRecord.getReleaseTime())
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .addContentKV(AICCommonField.Field_Author,authorName)
                .addContentKV(AICCommonField.Field_Title,title)
                .addContentKV(AICCommonField.Field_Content, content.toString())
                .addContentKV(AICCommonField.Field_Images, images.toString())
                .flowInPipelineTag("kafka")
                .build();
        crawlerData.setFilterPipelineResult(true);
        listCrawlerData.add(crawlerData);
    }


    /**
     * Accepts only records tagged with this script's site ("article").
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        // FIX: compare from the constant side so a record without a site tag yields
        // false instead of throwing NPE.
        return site.equals(crawlerRecord.tagsCreator().bizTags().site());
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // Intentionally empty: this script needs no post-execution cleanup.
    }
    /**
     * Re-queues a failed download, tracking the retry count in the record's business
     * tags and giving up after 10 attempts.
     *
     * @param crawlerRequestRecord the failed request to retry
     * @param parseList            output list the retry record is appended to
     */
    private static void recordAgainRequest(CrawlerRequestRecord crawlerRequestRecord,List<CrawlerRequestRecord> parseList){
        int count = 1;
        String url = crawlerRequestRecord.getHttpRequest().getUrl();
        if (crawlerRequestRecord.tagsCreator().bizTags().hasKVTag(RECORD_AGAIN_REQUEST)){
            count = Integer.parseInt(crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(RECORD_AGAIN_REQUEST));
            if (count >= 10){
                // FIX: stop retrying once the cap is reached — the original logged the
                // error but re-queued the request anyway (and used string concatenation).
                log.error("url excessive number of repeated downloads this url = {}", url);
                return;
            }
        }
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .releaseTime(System.currentTimeMillis())
                .httpUrl(url)
                .recordKey(crawlerRequestRecord.getRecordKey()+count)
                .copyResultTags()
                .copyBizTags()
                .build();

        record.getHttpRequest().setCookies(crawlerRequestRecord.getHttpRequest().getCookies());
        record.getHttpRequest().setExtras(crawlerRequestRecord.getHttpRequest().getExtras());
        record.getHttpRequest().setHeaders(crawlerRequestRecord.getHttpRequest().getHeaders());
        record.setNeedParsedPage(crawlerRequestRecord.isNeedParsedPage());
        record.setNeedWashPage(crawlerRequestRecord.isNeedWashPage());
        // FIX: store the incremented count. The original `count++` stored the OLD value,
        // so the retry counter never advanced and the 10-retry cap was never reached.
        record.tagsCreator().bizTags().addCustomKV(RECORD_AGAIN_REQUEST, count + 1);
        parseList.add(record);

    }
    /**
     * Checks whether a release timestamp falls inside the record's configured date range.
     * Records without a date-range filter always pass.
     *
     * @param crawlerRequestRecord record carrying the filter configuration
     * @param releaseTimeToLong    release time in epoch millis; null/0 means "unknown" and fails the range check
     * @return true when in range, or when no date filter applies
     */
    private boolean isDateRange(CrawlerRequestRecord crawlerRequestRecord,Long releaseTimeToLong){
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        if (filter != CrawlerEnum.CrawlerRecordFilter.keyOrDateRange && filter != CrawlerEnum.CrawlerRecordFilter.dateRange) {
            return true; // no date filtering requested
        }
        Long startTime = null;
        Long endTime = null;
        for (FilterInfo filterInfo : crawlerRequestRecord.getFilterInfos()) {
            if (filterInfo.getFilter() != CrawlerEnum.CrawlerRecordFilter.dateRange) {
                continue;
            }
            long[] dateAllowRange = filterInfo.getDateAllowRange();
            int hourFromNow = filterInfo.getHourFromNow();
            if (dateAllowRange != null) {
                // explicit [start, end] window
                startTime = dateAllowRange[0];
                endTime = dateAllowRange[1];
            } else if (hourFromNow != 0) {
                endTime = System.currentTimeMillis() - 60000; // system time minus one minute
                startTime = endTime - DateUtils.MILLIS_PER_HOUR * hourFromNow;
            }
        }
        // FIX: null-check releaseTimeToLong before unboxing — a failed upstream date parse
        // could pass null, which previously threw NPE in the comparison.
        return startTime != null && endTime != null && releaseTimeToLong != null
                && releaseTimeToLong != 0
                && startTime <= releaseTimeToLong && releaseTimeToLong <= endTime;
    }
    /**
     * @return the crawler domain identifier ("sohu")
     */
    @Override
    public String domain() {
        return domain;
    }
    /*
     * Normalizes an article timestamp like "…(2021-9-2)" into "2021/09/02": takes the
     * LAST parenthesized group, splits on '-', and zero-pads month and day.
     * NOTE(review): throws NPE when no parenthesized date is present — confirm inputs
     * always contain one.
     * */
    private String getTime(String time) {
        Matcher matcher = Pattern.compile("\\(.*\\)").matcher(time);
        String wrapped = null;
        while (matcher.find()) {
            wrapped = matcher.group();
        }
        String date = wrapped.substring(1, wrapped.length() - 1);
        String[] parts = date.split("-");
        for (int i = 1; i <= 2; i++) {
            if (parts[i].length() != 2) {
                parts[i] = "0" + parts[i];
            }
        }
        return parts[0] + "/" + parts[1] + "/" + parts[2];
    }
    // Pool of desktop browser User-Agent strings; one is picked at random per request
    // by getRandomUA() to vary the request fingerprint.
    private static List<String> agentList = new ArrayList<>();

    static {
        // Populated once at class load; values are sent verbatim in the User-Agent header.
        agentList.add("Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)");
        agentList.add("Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.28.3 (KHTML, like Gecko) Version/3.2.3 ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/525.28.3");
        agentList.add("Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16");
        agentList.add("Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14");
        agentList.add("Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14");
        agentList.add("Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14");
        agentList.add("Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02");
        agentList.add("Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0");
        agentList.add("Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.13 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4043.400");
    }

    /**
     * @return a User-Agent string chosen uniformly at random from {@link #agentList}
     */
    private static String getRandomUA(){
        // FIX: RandomUtils.nextInt(start, end) treats `end` as EXCLUSIVE, so the original
        // `size() - 1` bound could never return the last user agent in the list.
        return agentList.get(RandomUtils.nextInt(0, agentList.size()));
    }

}
