package com.chance.cc.crawler.development.scripts.dangdang;

import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.development.scripts.allfeild.AICCommonField;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;

/**
 * Dangdang (dangdang.com) keyword-search crawler script: expands source keywords
 * into search requests, follows product tiles on result pages, and washes the
 * long/short product reviews into comment data.
 *
 * @author songding
 * @version 1.0
 * @since 2021/10/14
 **/
public class DangdangCrawlerScript extends CrawlerCommonScript {
    private static Logger log = LoggerFactory.getLogger(DangdangCrawlerScript.class);
    // Crawler domain identifier reported by domain().
    public static final String domain = "dangdang";
    // Only records whose biz-tag site equals this value are handled (see crawlerCheck).
    private static final String site = "article";
    // Biz-tag key counting how many times a failed URL has been re-queued.
    private static final String RECORD_AGAIN_REQUEST = "record_again_request";

    // URL patterns registered with the framework in initUrlRegulars.
    private static final String DangUrlRegulars = "https://search.dangdang.com/\\S*";
    private static final String listUrlRegulars = "https://search.dangdang.com/\\?key=\\S*";
    private static final String commentRegulars = "https://product.dangdang.com/index.php\\?r=comment\\S*";
    // Base search URL; the URL-encoded keyword is appended (see prepareRequest).
    private static final String URL="https://search.dangdang.com/?key=";


    // Desktop comment endpoint and its query-string template (appears unused here — kept for reference).
    private static final String commentUrl = "https://product.dangdang.com/index.php?r=comment%2Flist";
    private static final String params = "&productId=%s&categoryPath=01.22.01.13.00.00&mainProductId=%s&mediumId=0&pageIndex=%s&sortType=2&filterType=1&isSystem=1&tagId=0&tagFilterCount=0&template=publish&long_or_short=%s";

    // Mobile review page URL patterns (short reviews keyed by pid, long reviews by action).
    private static final String shortRegulars = "http://product.m.dangdang.com/review.php\\?pid=\\S*";
    private static final String longRegulars = "http://product.m.dangdang.com/review.php\\?action=\\S*";

    // Review endpoint templates; format arguments are (pid, page) in that order.
    private static final String shortCommentUrl = "http://product.m.dangdang.com/review.php?pid=%s&main_pid=0&product_medium=0&sort_type=1&action=get_review_html_by_page&page=%s&label_id=0&filter_type=1&focusCurProduct=&first_in=0";
    private static final String longCommentUrl = "http://product.m.dangdang.com/review.php?action=get_long_review_by_page&comment_type=1&pid=%s&main_pid=&product_medium=0&page=%s&sid=241d102535a23e920847fad76ecd2afa";

    // Monitor used to serialize the global back-off sleep on HTTP 429 (see parseLinks).
    private static final Object obj = new Object();

    /**
     * Registers every URL pattern this script can handle with the crawler framework.
     */
    @Override
    public void initUrlRegulars() {
        String[] regulars = {
                DangUrlRegulars,
                listUrlRegulars,
                commentRegulars,
                shortRegulars,
                longRegulars
        };
        for (String regular : regulars) {
            addUrlRegular(regular);
        }
    }
    /**
     * Expands keyword source records into Dangdang search requests.
     *
     * <p>Each support record whose URL contains "keys" is expected to carry a JSON
     * payload of the form {msg, content:[{keyword}]}; every keyword becomes one
     * search request tagged with the raw keyword under the "keywords" biz tag.
     *
     * @param crawlerRequestRecord the originating request, used as the item-page parent
     * @param supportSourceRecords keyword source records, may be null
     * @return the search requests to enqueue (never null)
     */
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> prepareLinks = new ArrayList<>();
        if (supportSourceRecords == null) {
            return prepareLinks;
        }
        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
            String url = supportSourceRecord.getHttpRequest().getUrl();
            if (!url.contains("keys")) {
                continue;
            }
            HttpPage internalDownloadPage = supportSourceRecord.getInternalDownloadPage();
            Json json = internalDownloadPage.getJson();
            String msg = json.jsonPath($_type + ".msg").get();
            if (!"success".equals(msg)) {
                log.error("keyword page [{}] download error!", internalDownloadPage.getRequest().getUrl());
                return prepareLinks;
            }
            List<String> all = json.jsonPath($_type + ".content").all();
            for (String data : all) {
                JSONObject jsonObject = JSONObject.parseObject(data);
                String keyword = jsonObject.getString("keyword");
                String keywords;
                try {
                    keywords = URLEncoder.encode(keyword, "utf-8");
                } catch (UnsupportedEncodingException e) {
                    // utf-8 is always supported; if encoding still fails, skip the keyword
                    // instead of silently building a ".../?key=null" URL as before.
                    log.error("failed to url-encode keyword [{}]", keyword, e);
                    continue;
                }
                String startUrl = URL + keywords;
                CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(startUrl)
                        .releaseTime(System.currentTimeMillis())
                        .needWashed(true)
                        .needParsed(true)
                        .copyResultTags()
                        .copyBizTags()
                        .build();
                record.tagsCreator().bizTags().addCustomKV("keywords", keyword);
                prepareLinks.add(record);
            }
        }
        return prepareLinks;
    }
    /**
     * Routes a downloaded page to the matching link-extraction handler.
     *
     * <p>Failed downloads are triaged first: HTTP 429 triggers a global 15s back-off
     * and drops the page, HTTP 404 is dropped permanently, and any other failure is
     * re-queued via {@link #recordAgainRequest}. (The original code tested
     * {@code != 404} before {@code == 429}, making the 429 branch unreachable.)
     *
     * @param crawlerRecord the request record that produced this page
     * @param page          the downloaded page
     * @return follow-up requests discovered on the page (never null)
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> parseLinks = new ArrayList<>();
        if (page.getStatusCode() != 200 || !page.isDownloadSuccess() || StringUtils.isEmpty(page.getRawText())) {
            log.error("page download failed, statusCode = {}", page.getStatusCode());
            crawlerRecord.setNeedWashPage(false);
            if (page.getStatusCode() == 429) {
                // Rate limited: hold the shared monitor so concurrent workers back off too.
                synchronized (obj) {
                    try {
                        Thread.sleep(1000 * 15);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                    }
                }
            } else if (page.getStatusCode() != 404) {
                // Transient failure: schedule a retry of the same URL.
                recordAgainRequest(crawlerRecord, parseLinks);
            }
            // 404 (and the cases above): nothing further to parse on this page.
            return parseLinks;
        }
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (url.matches(listUrlRegulars)) {
            this.getCommentUrl(crawlerRecord, page, parseLinks);
        }
        if (url.matches(longRegulars)) {
            this.getLongComment(crawlerRecord, page, parseLinks);
        }
        if (url.matches(shortRegulars)) {
            this.getShortComment(crawlerRecord, page, parseLinks);
        }
        return parseLinks;
    }
    /*
    * 短评
    * */
    /**
     * Short reviews: schedules the next page of short comments for a product while
     * the current page still contains comment items.
     *
     * <p>Fixes the swapped {@code String.format} arguments: {@code shortCommentUrl}
     * is {@code pid=%s…page=%s}, so the product id must come first (as the sibling
     * {@link #getCommentUrl} already does).
     */
    private void getShortComment(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String id = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("id");
        KVTag indexKey = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("index");
        if (id == null || indexKey == null) {
            // Tags are attached in getCommentUrl; without them the next-page URL cannot be built.
            log.error("short comment record missing id/index tag, url = {}", crawlerRecord.getHttpRequest().getUrl());
            return;
        }
        Integer index = (Integer) indexKey.getVal() + 1;
        List<String> all = page.getHtml().xpath("/html/body/li[@class=\"comment_item\"]").all();
        if (all == null || all.isEmpty()) {
            // No more comments on this page: stop paging.
            return;
        }
        String shortUrl = String.format(shortCommentUrl, id, index);
        CrawlerRequestRecord shortRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(shortUrl)
                .releaseTime(System.currentTimeMillis())
                .httpHead("User-Agent", getRandomUA())
                .copyBizTags()
                .copyResultTags()
                .build();
        shortRecord.tagsCreator().bizTags().addCustomKV("id", id);
        shortRecord.tagsCreator().bizTags().addCustomKV("index", index);
        parseLinks.add(shortRecord);
    }
    /*
    * 长评
    * */
    /**
     * Long reviews: schedules the next page of long comments while the current JSON
     * page still contains review_list entries.
     *
     * <p>Fixes the swapped {@code String.format} arguments ({@code longCommentUrl}
     * is {@code pid=%s…page=%s}), and returns when the page JSON cannot be read
     * instead of scheduling a next page from an unreadable one.
     */
    private void getLongComment(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String id = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("id");
        KVTag indexKey = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("index");
        if (id == null || indexKey == null) {
            // Tags are attached in getCommentUrl; without them the next-page URL cannot be built.
            log.error("long comment record missing id/index tag, url = {}", crawlerRecord.getHttpRequest().getUrl());
            return;
        }
        Integer index = (Integer) indexKey.getVal() + 1;
        try {
            List<String> pageCount = page.getJson().jsonPath($_type + ".review_list").all();
            if (pageCount.size() == 0) {
                // No more reviews: stop paging.
                return;
            }
        } catch (Exception e) {
            crawlerRecord.setNeedWashPage(false);
            log.error("unreadable long-comment page, url = {}", crawlerRecord.getHttpRequest().getUrl(), e);
            return;
        }
        String longUrl = String.format(longCommentUrl, id, index);
        CrawlerRequestRecord longRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(longUrl)
                .releaseTime(System.currentTimeMillis())
                .httpHead("User-Agent", getRandomUA())
                .copyBizTags()
                .copyResultTags()
                .build();
        longRecord.tagsCreator().bizTags().addCustomKV("id", id);
        longRecord.tagsCreator().bizTags().addCustomKV("index", index);
        parseLinks.add(longRecord);
    }

    /**
     * From a search-result page, builds the first-page long/short comment requests
     * for every product tile, plus the request for the next search-result page.
     *
     * NOTE(review): the next-page URL hard-codes {@code key=nike} instead of reusing
     * the "keywords" biz tag set in prepareRequest — confirm whether that is intentional.
     */
    private void getCommentUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        List<Selectable> nodes = page.getHtml().xpath("//*[@id=\"component_59\"]/li").nodes();
        // Always start at the first comment page for each product.
        int index = 1;
        if (nodes.size() == 0){
            return;
        }
        for (Selectable node : nodes){
            // The <li> id attribute is used as the product id in the review URLs.
            String id = node.xpath("./@id").get();
            String longUrl = String.format(longCommentUrl,id,index);
            String shortUrl = String.format(shortCommentUrl,id,index);
            CrawlerRequestRecord longRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(longUrl)
                    .releaseTime(System.currentTimeMillis())
                    .httpHead("User-Agent",getRandomUA())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            // Tag id/index so getLongComment / getShortComment can build the next page.
            longRecord.tagsCreator().bizTags().addCustomKV("id",id);
            longRecord.tagsCreator().bizTags().addCustomKV("index",index);
            parseLinks.add(longRecord);
            CrawlerRequestRecord shortRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(shortUrl)
                    .releaseTime(System.currentTimeMillis())
                    .httpHead("User-Agent",getRandomUA())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            shortRecord.tagsCreator().bizTags().addCustomKV("id",id);
            shortRecord.tagsCreator().bizTags().addCustomKV("index",index);
            parseLinks.add(shortRecord);
        }
        // Paginate the search listing itself via the page_index biz tag (1 on the first page).
        KVTag page_indexKey = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("page_index");
        Integer page_index = 1;
        if (page_indexKey == null){
            page_index = 1;
        }else{
            page_index = (Integer) page_indexKey.getVal()+1;
        }
        String listTurnUrl = "https://search.dangdang.com/?key=nike&page_index="+page_index;
        CrawlerRequestRecord listRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(listTurnUrl)
                .releaseTime(System.currentTimeMillis())
                .httpCookie("sessionID","pc_a3c311832b1f0ee1c8c61a3338d8c980423dc6138f585e37232266cb01be3b83")
                .copyBizTags()
                .copyResultTags()
                .build();
        listRecord.tagsCreator().bizTags().addCustomKV("page_index",page_index);
        parseLinks.add(listRecord);

    }

    /**
     * Dispatches a downloaded page to the wash routine matching both the requested
     * data type (article/comment result tag) and the page's URL pattern.
     *
     * @param crawlerRecord the request record that produced this page
     * @param page          the downloaded page
     * @return the washed data items (never null)
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> dataList = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        boolean wantsArticle = crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.article);
        boolean wantsComment = crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.comment);
        if (wantsArticle && url.matches(listUrlRegulars)) {
            washArticle(crawlerRecord, page, dataList);
        }
        if (wantsComment && url.matches(longRegulars)) {
            washLongComment(crawlerRecord, page, dataList);
        }
        if (wantsComment && url.matches(shortRegulars)) {
            washShortComment(crawlerRecord, page, dataList);
        }
        return dataList;
    }

    /**
     * Extracts product tiles from a search-result page into article data.
     * Each li under #component_59 yields id, image, price, title and product URL;
     * when the record carries a "monthly" biz tag, an extra URL-only item is
     * emitted under the same dataId. (Removes the unused {@code index} local and a
     * stray empty statement from the original.)
     */
    private void washArticle(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> dataList) {
        List<Selectable> nodes = page.getHtml().xpath("//*[@id=\"component_59\"]/li").nodes();
        if (nodes == null) {
            return;
        }
        for (Selectable node : nodes) {
            String id = node.xpath("./@id").get();
            // Image and product links on the page are protocol-relative.
            String img = "https:" + node.xpath("./a/img/@src").get();
            String price = node.xpath("./p[@class=\"price\"]/span").get();
            String title = node.xpath("./p[2]/a/@title").get();
            String url = "https:" + node.xpath("./p[1]/a/@href").get();
            CrawlerData crawlerArticle = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, id))
                    .url(url)
                    .releaseTime(System.currentTimeMillis())
                    .addContentKV(AICCommonField.Field_Title, title)
                    .addContentKV(AICCommonField.Field_Images, img)
                    .addContentKV(AICCommonField.Field_Produce_Price, price)
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                    .build();
            dataList.add(crawlerArticle);
            if (crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("monthly") != null) {
                // "monthly" runs additionally emit a URL-only item for the same product.
                CrawlerData crawlerArticleUrl = CrawlerData.builder()
                        .data(crawlerRecord, page)
                        .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, id))
                        .url(url)
                        .releaseTime(System.currentTimeMillis())
                        .addContentKV(AICCommonField.Field_Urls, url)
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                        .build();
                dataList.add(crawlerArticleUrl);
            }
        }
    }

    /**
     * Washes a long-review JSON page into comment data. Each review_list entry is
     * parsed for id, title, content, score, author and creation date (yyyy-MM-dd);
     * entries outside the record's date-range filter are skipped.
     * (Replaces {@code printStackTrace} with a parameterized log call.)
     */
    private void washLongComment(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> dataList) {
        List<String> all = page.getJson().jsonPath($_type + ".review_list").all();

        for (String str : all) {
            JSONObject jsonObject = JSONObject.parseObject(str);
            String comment_id = jsonObject.getString("comment_id");
            String title = jsonObject.getString("title");
            String content = jsonObject.getString("content");
            String score = jsonObject.getString("score");
            String creation_date = jsonObject.getString("creation_date");
            String cust_name = jsonObject.getString("cust_name");
            String product_id = jsonObject.getString("product_id");

            long releaseTime = 0L;
            try {
                releaseTime = DateUtils.parseDate(creation_date, "yyyy-MM-dd").getTime();
            } catch (ParseException e) {
                // Keep 0 so the date-range filter decides (passes when no filter is configured).
                log.warn("unparsable creation_date [{}] on {}", creation_date, crawlerRecord.getHttpRequest().getUrl());
            }
            if (!isDateRange(crawlerRecord, releaseTime)) {
                continue;
            }
            CrawlerData crawlerArticle = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.comment, comment_id))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, product_id))
                    .url(crawlerRecord.getHttpRequest().getUrl())
                    .releaseTime(releaseTime)
                    .addContentKV(AICCommonField.Field_Author, cust_name)
                    .addContentKV(AICCommonField.Field_Title, title)
                    .addContentKV(AICCommonField.Field_Content, content)
                    .addContentKV(AICCommonField.Field_Score, score)
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                    .build();
            dataList.add(crawlerArticle);
        }

    }

    /*
     * Short reviews: washes the short-comment HTML fragment into comment data.
     *
     * NOTE(review): the inner xpath expressions start with "//", which selects from
     * the whole document rather than relative to `node`; the first iteration therefore
     * collects every comment on the page and the trailing `break` exits afterwards.
     * The per-field lists are assumed to be index-aligned and equally sized — a
     * missing author/content on one comment would shift or overflow the indices
     * (TODO confirm against real pages).
     */
    private void washShortComment(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> dataList) {
        List<Selectable> nodes = page.getHtml().xpath("//li[@class=\"comment_item\"]").nodes();
        // Parent product id was attached as a biz tag when the comment URL was created.
        String pid = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("id");
        for (Selectable node : nodes){
            List<String> scores = node.xpath("//span[@class=\"star_num\"]/text()").all();// score
            List<String> contents = node.xpath("//p[@class=\"review_text j_review_text\"]//a[@class=\"j_review_text_div review_text_div\"]").all();;// content
            List<String> ids = node.xpath("//@comment_id").all();// comment id
            List<String> times = node.xpath("//span[@class=\"date\"]").all();// date, format yyyy.MM.dd
            List<String> authorNames  =node.xpath("//a[@class=\"name_text\"]").all();;// user name
            for (int i = 0; i <scores.size() ; i++) {
                String time = times.get(i);
                Long releaseTime = 0l;
                try {
                    releaseTime = DateUtils.parseDate(time,"yyyy.MM.dd").getTime();
                } catch (ParseException e) {
                    e.printStackTrace();
                }
                // Skip comments outside the configured date window.
                if (!isDateRange(crawlerRecord,releaseTime)){
                    continue;
                }
                CrawlerData crawlerArticle = CrawlerData.builder()
                        .data(crawlerRecord,page)
                        .dataId(StringUtils.joinWith("-",crawlerRecord.getDomain(),CrawlerEnum.CrawlerDataType.comment,ids.get(i)))
                        .parentId(StringUtils.joinWith("-",crawlerRecord.getDomain(),CrawlerEnum.CrawlerDataType.article,pid))
                        .url(crawlerRecord.getHttpRequest().getUrl())
                        .releaseTime(releaseTime)
                        .addContentKV(AICCommonField.Field_Author,authorNames.get(i))
                        .addContentKV(AICCommonField.Field_Content,contents.get(i))
                        // Drops the last character of the score text (presumably a unit suffix — TODO confirm).
                        .addContentKV(AICCommonField.Field_Score,scores.get(i).substring(0,scores.get(i).length()-1))
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                        .build();
                dataList.add(crawlerArticle);
            }
            break;
        }
    }

    /**
     * Checks whether a release timestamp falls inside the record's configured
     * date-range filter. Records without a date-range filter always pass.
     *
     * @param crawlerRequestRecord record carrying the filter configuration
     * @param releaseTimeToLong    release time in epoch millis (0 = unknown)
     * @return true when the timestamp lies inside the window, or no window applies
     */
    private boolean isDateRange(CrawlerRequestRecord crawlerRequestRecord, Long releaseTimeToLong) {
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        boolean dateFiltered = filter == CrawlerEnum.CrawlerRecordFilter.keyOrDateRange
                || filter == CrawlerEnum.CrawlerRecordFilter.dateRange;
        if (!dateFiltered) {
            return true;
        }
        Long windowStart = null;
        Long windowEnd = null;
        for (FilterInfo filterInfo : crawlerRequestRecord.getFilterInfos()) {
            if (filterInfo.getFilter() != CrawlerEnum.CrawlerRecordFilter.dateRange) {
                continue;
            }
            long[] dateAllowRange = filterInfo.getDateAllowRange();
            if (dateAllowRange != null) {
                windowStart = dateAllowRange[0];
                windowEnd = dateAllowRange[1];
            } else if (filterInfo.getHourFromNow() != 0) {
                // Window ends one minute before "now" and spans hourFromNow hours.
                windowEnd = System.currentTimeMillis() - 60000;
                windowStart = windowEnd - DateUtils.MILLIS_PER_HOUR * filterInfo.getHourFromNow();
            }
        }
        return windowStart != null
                && releaseTimeToLong != 0
                && windowStart <= releaseTimeToLong
                && releaseTimeToLong <= windowEnd;
    }
    /**
     * Re-queues a failed request, tracking how often the same URL has been retried
     * via the RECORD_AGAIN_REQUEST biz tag, and gives up after 10 attempts.
     *
     * <p>Fixes two defects: the original stored {@code count++} (the stale value),
     * so the counter never grew and retries could loop forever; and the limit check
     * only logged without stopping the re-queue.
     */
    private static void recordAgainRequest(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> parseList) {
        int count;
        String url = crawlerRequestRecord.getHttpRequest().getUrl();
        if (crawlerRequestRecord.tagsCreator().bizTags().hasKVTag(RECORD_AGAIN_REQUEST)) {
            count = Integer.valueOf(crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(RECORD_AGAIN_REQUEST));
            if (count >= 10) {
                // Retry budget exhausted: stop re-queueing this URL.
                log.error("url excessive number of repeated downloads this url = " + url);
                return;
            }
        } else {
            count = 1;
        }
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .releaseTime(System.currentTimeMillis())
                .httpUrl(url)
                // count makes the record key unique per attempt so the retry is not deduplicated.
                .recordKey(crawlerRequestRecord.getRecordKey() + count)
                .httpHead("User-Agent", getRandomUA())
                .copyResultTags()
                .copyBizTags()
                .build();

        record.getHttpRequest().setCookies(crawlerRequestRecord.getHttpRequest().getCookies());
        record.getHttpRequest().setExtras(crawlerRequestRecord.getHttpRequest().getExtras());
        record.setNeedParsedPage(crawlerRequestRecord.isNeedParsedPage());
        record.setNeedWashPage(crawlerRequestRecord.isNeedWashPage());
        // Store the incremented counter so the next retry sees count + 1.
        record.tagsCreator().bizTags().addCustomKV(RECORD_AGAIN_REQUEST, count + 1);
        parseList.add(record);

    }
    /**
     * A record is handled by this script only when its biz-tag site is "article".
     * Constant-first equals avoids an NPE when the record carries no site value.
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        return site.equals(crawlerRecord.tagsCreator().bizTags().site());
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // No post-processing required for this script.
    }

    /**
     * @return the crawler domain identifier ("dangdang")
     */
    @Override
    public String domain() {
        return domain;
    }
    // Pool of User-Agent strings used to vary request fingerprints; filled in the static block below.
    private static List<String> agentList = new ArrayList<>();
    /**
     * Picks a random User-Agent from the pool. RandomUtils.nextInt's upper bound is
     * exclusive, so the bound must be agentList.size(); the original size() - 1
     * could never return the last entry.
     */
    private static String getRandomUA(){
        return agentList.get(RandomUtils.nextInt(0, agentList.size()));
    }
    // Populate the User-Agent pool once at class-load time (Chrome/IE/Opera/Firefox desktop UAs).
    static {
        agentList.add("Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/536.6");
        agentList.add("Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/536.6");
        agentList.add("Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.1");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)");
        agentList.add("Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.28.3 (KHTML, like Gecko) Version/3.2.3 ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/525.28.3");
        agentList.add("Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16");
        agentList.add("Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14");
        agentList.add("Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14");
        agentList.add("Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14");
        agentList.add("Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02");
        agentList.add("Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0");
        agentList.add("Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.13 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4043.400");
    }
}
