package com.chance.cc.crawler.development.scripts.weixin.api;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpConstant;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.downloader.HttpRequestBody;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.tags.CategoryTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerResultTags;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.article;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.interaction;

/**
 * @author lt
 * @version 1.0
 * @date 2021-03-17 16:32:53
 * @email okprog@sina.com
 */
public class WeiXinApiSearchKWCrawlerScript extends CrawlerCommonScript {

    private static Logger logger = LoggerFactory.getLogger(WeiXinApiSearchKWCrawlerScript.class);

    // URL patterns used to recognise which crawl stage a request belongs to
    // (registered in initUrlRegulars(), matched in prepareRequest()/parseLinks()).
    public static final String indexRegex = "https://weixin\\.qq\\.com/";
    public static final String keysRegex = "https?://\\S*v1/meta/weixin/keys\\S*";
    public static final String searchKWUrlRegex = "https://api\\.newrank\\.cn/api/custom/ipsos/v2/weixin/data/search";
    public static final String searchAuthorUrlRegex = "https://api\\.newrank\\.cn/api/sync/weixin/account/articles_content";
    public static final String interactionUrlRegex = "https://api\\.newrank\\.cn/api/custom/ipsos/weixin/data/back";

    // Concrete newrank.cn endpoints the generated POST requests are sent to.
    public static final String searchKWUrl = "https://api.newrank.cn/api/custom/ipsos/v2/weixin/data/search";
    public static final String searchAuthorUrl = "https://api.newrank.cn/api/sync/weixin/account/articles_content";
    public static final String interactionUrl = "https://api.newrank.cn/api/custom/ipsos/weixin/data/back";

    /*
    POST request body parameter keys
     */
    public static final String TOKEN_KEY = "key";
    // API access token, sent as the "key" request header on every call.
    public static final String TOKEN = "vf94a4bdb7b0e49bba2e5z6jt";
    public static final String SHOULD = "should";
    public static final String EXPRESSION = "expression";
    public static final String ACCOUNT = "account";
    public static final String FROM = "from";
    public static final String TO = "to";
    public static final String PAGE = "page";
    public static final String SIZE = "size";
    public static final String UUID = "uuid";

    // Schedule-tag keys that tune what gets crawled and over which time window.
    public static final String IS_OUTPUT_Interaction = "is_output_Interaction";
    public static final String INTERACTION_DATA_DAY_FROM_START_TIME = "interaction_data_day_from_start_time";
    public static final String ARTICLE_DATA_DAY_FROM_NOW_TIME = "article_data_day_from_now_time";

    // "site" biz-tag value this script accepts in crawlerCheck().
    private static final String scriptSite = "apikw";

    // Suffixes of the keys-service URL: keyword list vs. account-id list.
    public static final String kwSite = "medical_keyword";
    public static final String authorSite = "medical_weixin_uid";

    /**
     * Expands a schedule request into the concrete API search requests.
     * <p>
     * The first support record is expected to be the keys-service response
     * ({@link #keysRegex}); its "content" array supplies either search
     * keywords ({@link #kwSite}) or account ids ({@link #authorSite}). When
     * nothing can be derived, falls back to the superclass behaviour.
     *
     * @param requestRecord        the originating schedule record
     * @param supportSourceRecords support records; index 0 must be the keys response
     * @return the request records to crawl (never null)
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> allItemRecords = new ArrayList<>();
        if (supportSourceRecords == null || supportSourceRecords.isEmpty()){
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        CrawlerRequestRecord keywordRecord = supportSourceRecords.get(0);
        String keywordUrl = keywordRecord.getHttpRequest().getUrl();
        if (keywordUrl.matches(keysRegex)){
            try {
                JSONObject jsonObject = JSONObject.parseObject(keywordRecord.getInternalDownloadPage().getRawText());
                // status == 0 marks a successful keys-service response
                if (jsonObject != null && jsonObject.getIntValue("status") == 0){
                    JSONArray objects = jsonObject.getJSONArray("content");
                    // BUGFIX: guard against a missing "content" array (previously NPE'd below)
                    if (objects != null){
                        if (keywordUrl.endsWith(kwSite)){
                            List<Object> keywords = new ArrayList<>();
                            CrawlerRequestRecord searchKwRecord = getSearchKwRecord(requestRecord, objects, keywords);
                            allItemRecords.add(searchKwRecord);
                            CategoryTag categoryTag = requestRecord.tagsCreator().scheduleTags().getCategoryTag();
                            // interaction back-fill is only scheduled when explicitly enabled
                            if (categoryTag.isContainKVTag(IS_OUTPUT_Interaction)){
                                // BUGFIX: pass a fresh list — getSearchKwRecord already filled
                                // "keywords", and reusing it would duplicate every term in the
                                // interaction request's "should" expression
                                CrawlerRequestRecord interactionKwRecord = getSearchInteractionKwRecord(requestRecord, objects, new ArrayList<>());
                                allItemRecords.add(interactionKwRecord);
                            }
                        }
                        if (keywordUrl.endsWith(authorSite)){
                            // one request per account: each entry carries one account id in "keyword"
                            for (Object object : objects) {
                                CrawlerRequestRecord searchAuthorRecord = getSearchAuthorRecord(requestRecord, (JSONObject) object);
                                allItemRecords.add(searchAuthorRecord);
                            }
                        }
                    }
                }
            }catch (Exception e){
                logger.error(e.getMessage(),e);
            }
        }
        if (allItemRecords.isEmpty()){
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        return allItemRecords;
    }

    /**
     * Builds the POST record that pulls article content for a single WeChat
     * account over the configured dateRange window ending now.
     *
     * @param requestRecord the originating schedule record
     * @param object        keys-service entry whose "keyword" field holds the account id
     * @return the prepared author-search request record
     */
    private CrawlerRequestRecord getSearchAuthorRecord(CrawlerRequestRecord requestRecord, JSONObject object) {
        String keyword = object.getString("keyword");
        // window length in hours, taken from the dateRange filter when present
        int hourFromNow = 0;
        for (FilterInfo info : requestRecord.getFilterInfos()) {
            if (info.getFilter().enumVal().equals(CrawlerEnum.CrawlerRecordFilter.dateRange.enumVal())){
                hourFromNow = info.getHourFromNow();
            }
        }
        long endTimeMillis = System.currentTimeMillis();
        long startTimeMillis = endTimeMillis - (hourFromNow * 60 * 60 * 1000L);
        SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

        JSONObject bodyParam = new JSONObject();
        bodyParam.put(ACCOUNT, keyword);
        bodyParam.put(FROM, formatter.format(new Date(startTimeMillis)));
        bodyParam.put(TO, formatter.format(new Date(endTimeMillis)));
        bodyParam.put(PAGE, "1");
        bodyParam.put(SIZE, "20");

        logger.info("weixin api start crawler author : [{}] data",keyword);
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(requestRecord)
                .httpUrl(searchAuthorUrl)
                .recordKey(StringUtils.joinWith("-",searchAuthorUrl,endTimeMillis))
                .releaseTime(endTimeMillis)
                .notFilterRecord()
                .copyBizTags()
                .resultLabelTag(article)
                .needParsed(true)
                .needWashed(true)
                .build();
        HttpRequest httpRequest = record.getHttpRequest();
        httpRequest.setMethod(HttpConstant.Method.POST);
        httpRequest.addHeader(TOKEN_KEY, TOKEN);
        httpRequest.setRequestBody(HttpRequestBody.form(bodyParam.toJavaObject(Map.class), "utf-8"));
        record.tagsCreator().bizTags().addKeywords(keyword);
        return record;
    }

    /**
     * Builds the POST record that searches WeChat articles by keyword expression.
     * <p>
     * Every keys-service entry contributes one "should" term; a keyword that
     * contains spaces becomes a {"must": [words...]} sub-expression (all words
     * must match). The time window is the dateRange filter (hours back from
     * now) unless the ARTICLE_DATA_DAY_FROM_NOW_TIME schedule tag is set, in
     * which case it is that many whole days ending at today's midnight.
     *
     * @param requestRecord the originating schedule record
     * @param objects       keys-service entries, each with a "keyword" field
     * @param keywords      accumulator for the generated terms — NOTE: this
     *                      caller-owned list is mutated here
     * @return the prepared keyword-search request record
     */
    private CrawlerRequestRecord getSearchKwRecord(CrawlerRequestRecord requestRecord, JSONArray objects, List<Object> keywords) {
        for (Object object : objects) {
            String keyword = ((JSONObject)object).getString("keyword");
            if (keyword.contains(" ")){
                // space-separated words: wrap them in a "must" clause so all match
                JSONObject must = new JSONObject();
                String[] strings = keyword.split(" ");
                List<String> keys = new ArrayList<>(Arrays.asList(strings));
                must.put("must",keys);
                keywords.add(must);
                continue;
            }
            keywords.add(keyword);
        }
        JSONObject expression = new JSONObject();
        expression.put(SHOULD,keywords);
        logger.info("keywords format : {}",expression);
        // window length in hours from the dateRange filter, if present
        List<FilterInfo> filterInfos = requestRecord.getFilterInfos();
        int hourFromNow = 0;
        for (FilterInfo filterInfo : filterInfos) {
            if (filterInfo.getFilter().enumVal().equals(CrawlerEnum.CrawlerRecordFilter.dateRange.enumVal())){
                hourFromNow = filterInfo.getHourFromNow();
            }
        }
        long systemTimeMillis = System.currentTimeMillis();
        long endTimeMillis = systemTimeMillis;
        String dataDay = requestRecord.tagsCreator().scheduleTags().getCategoryTag().getKVTagStrVal(ARTICLE_DATA_DAY_FROM_NOW_TIME);
        long startTimeMillis = systemTimeMillis - (hourFromNow * 60 * 60 * 1000L);
        if (StringUtils.isNotBlank(dataDay)){
            // day-based window: snap the end to today's midnight, go dataDay days back
            Date nowDate = new Date(systemTimeMillis);
            Calendar calendar = Calendar.getInstance();
            calendar.setTime(nowDate);
            calendar.set(Calendar.SECOND, 0);
            calendar.set(Calendar.MINUTE, 0);
            calendar.set(Calendar.HOUR_OF_DAY, 0);
            endTimeMillis = calendar.getTimeInMillis();
            startTimeMillis = endTimeMillis - (Integer.parseInt(dataDay) * 60 * 60 * 24 * 1000L);
        }
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Date startDate = new Date(startTimeMillis);
        Date endDate = new Date(endTimeMillis);
        String startTime = sdf.format(startDate);
        String endTime = sdf.format(endDate);

        JSONObject bodyParam = new JSONObject();
        bodyParam.put(EXPRESSION,expression);
        bodyParam.put(FROM,startTime);
        bodyParam.put(TO,endTime);
        bodyParam.put(PAGE,"1");
        bodyParam.put(SIZE,"20");

        CrawlerRequestRecord searchKwRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(requestRecord)
                .httpUrl(searchKWUrl)
                .recordKey(StringUtils.joinWith("-",searchKWUrl,endTimeMillis))
                .releaseTime(endTimeMillis)
                .notFilterRecord()
                .copyBizTags()
                .resultLabelTag(article)
                .needParsed(true)
                .needWashed(false)
                .build();
        HttpRequest httpRequest = searchKwRecord.getHttpRequest();
        httpRequest.setMethod(HttpConstant.Method.POST);
        httpRequest.addHeader(TOKEN_KEY,TOKEN);
        httpRequest.setRequestBody(HttpRequestBody.form(bodyParam.toJavaObject(Map.class),"utf-8"));
        for (Object keyword : keywords) {
            searchKwRecord.tagsCreator().bizTags().addKeywords(keyword.toString());
        }
        return searchKwRecord;
    }

    /**
     * Builds the POST record for the interaction (engagement) data search.
     * <p>
     * Keyword expression is built exactly like {@link #getSearchKwRecord}. The
     * time window differs: when INTERACTION_DATA_DAY_FROM_START_TIME is set,
     * the START of the window is snapped to midnight and the window extends
     * FORWARD that many days (vs. backward from today's midnight for articles).
     * <p>
     * NOTE(review): this posts to {@code searchKWUrl}, not
     * {@code interactionUrl} — confirm whether the back-fill endpoint was
     * intended here.
     *
     * @param requestRecord the originating schedule record
     * @param objects       keys-service entries, each with a "keyword" field
     * @param keywords      accumulator for the generated terms — NOTE: this
     *                      caller-owned list is mutated here
     * @return the prepared interaction-search request record
     */
    private CrawlerRequestRecord getSearchInteractionKwRecord(CrawlerRequestRecord requestRecord, JSONArray objects, List<Object> keywords) {
        for (Object object : objects) {
            String keyword = ((JSONObject)object).getString("keyword");
            if (keyword.contains(" ")){
                // space-separated words: wrap them in a "must" clause so all match
                JSONObject must = new JSONObject();
                String[] strings = keyword.split(" ");
                List<String> keys = new ArrayList<>(Arrays.asList(strings));
                must.put("must",keys);
                keywords.add(must);
                continue;
            }
            keywords.add(keyword);
        }
        JSONObject expression = new JSONObject();
        expression.put(SHOULD,keywords);
        logger.info("keywords format : {}",expression);
        // window length in hours from the dateRange filter, if present
        List<FilterInfo> filterInfos = requestRecord.getFilterInfos();
        int hourFromNow = 0;
        for (FilterInfo filterInfo : filterInfos) {
            if (filterInfo.getFilter().enumVal().equals(CrawlerEnum.CrawlerRecordFilter.dateRange.enumVal())){
                hourFromNow = filterInfo.getHourFromNow();
            }
        }
        long systemTimeMillis = System.currentTimeMillis();
        long endTimeMillis = systemTimeMillis;
        String dataDay = requestRecord.tagsCreator().scheduleTags().getCategoryTag().getKVTagStrVal(INTERACTION_DATA_DAY_FROM_START_TIME);
        long startTimeMillis = systemTimeMillis - (hourFromNow * 60 * 60 * 1000L);
        if (StringUtils.isNotBlank(dataDay)){
            // day-based window: snap the start to midnight, extend dataDay days forward
            Date startDate = new Date(startTimeMillis);
            Calendar calendar = Calendar.getInstance();
            calendar.setTime(startDate);
            calendar.set(Calendar.SECOND, 0);
            calendar.set(Calendar.MINUTE, 0);
            calendar.set(Calendar.HOUR_OF_DAY, 0);
            startTimeMillis = calendar.getTimeInMillis();
            endTimeMillis = startTimeMillis + (Integer.parseInt(dataDay) * 60 * 60 * 24 * 1000L);
        }
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Date startDate = new Date(startTimeMillis);
        Date endDate = new Date(endTimeMillis);
        String startTime = sdf.format(startDate);
        String endTime = sdf.format(endDate);

        JSONObject bodyParam = new JSONObject();
        bodyParam.put(EXPRESSION,expression);
        bodyParam.put(FROM,startTime);
        bodyParam.put(TO,endTime);
        bodyParam.put(PAGE,"1");
        bodyParam.put(SIZE,"20");

        CrawlerRequestRecord searchKwRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(requestRecord)
                .httpUrl(searchKWUrl)
                .recordKey(StringUtils.joinWith("-",searchKWUrl,endTimeMillis))
                .releaseTime(endTimeMillis)
                .notFilterRecord()
                .copyBizTags()
                .resultLabelTag(interaction)
                .needParsed(true)
                .needWashed(false)
                .build();
        HttpRequest httpRequest = searchKwRecord.getHttpRequest();
        httpRequest.setMethod(HttpConstant.Method.POST);
        httpRequest.addHeader(TOKEN_KEY,TOKEN);
        httpRequest.setRequestBody(HttpRequestBody.form(bodyParam.toJavaObject(Map.class),"utf-8"));
        for (Object keyword : keywords) {
            searchKwRecord.tagsCreator().bizTags().addKeywords(keyword.toString());
        }
        return searchKwRecord;
    }

    /**
     * Routes a downloaded page: broken pages are re-queued (without the
     * filter request type) for a retry; list pages are parsed for the
     * next-page request.
     *
     * @param crawlerRequestRecord the record that produced {@code httpPage}
     * @param httpPage             the downloaded page
     * @return follow-up request records (possibly empty, never null)
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        if (404 == httpPage.getStatusCode()){
            logger.info("status code is 404");
            return parsedLinks;
        }
        if (doHttpPageCheck(crawlerRequestRecord,httpPage)){
            // broken download: re-queue the same record so it is retried
            crawlerRequestRecord.setNeedWashPage(false);
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
            parsedLinks.add(crawlerRequestRecord);
            return parsedLinks;
        }
        String lastRequestUrl = lastRequest.getUrl();
        if (lastRequestUrl.matches(searchKWUrlRegex) || lastRequestUrl.matches(searchAuthorUrlRegex)){
            return parseSearchListLinks(crawlerRequestRecord,httpPage,parsedLinks);
        }
        // BUGFIX: return an empty list instead of null for unmatched URLs,
        // consistent with every other exit path
        return parsedLinks;
    }

    /**
     * Parses a search/author list response; when a full page (20 entries) was
     * returned, schedules a request for the next page, using the last entry's
     * publish time as the release time.
     *
     * @param crawlerRequestRecord the record that produced {@code httpPage}
     * @param httpPage             downloaded JSON list page
     * @param parsedLinks          accumulator for follow-up records
     * @return {@code parsedLinks}, possibly extended with the next-page record
     */
    private List<CrawlerRequestRecord> parseSearchListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        JSONObject pageObj = JSONObject.parseObject(httpPage.getRawText());
        int returnCode = pageObj.getIntValue("code");
        if (returnCode != 0){
            logger.error("search page has error,the return code is {}",returnCode);
            return parsedLinks;
        }
        // the endpoints answer with either {"data":{"list":[...]}} or {"data":[...]}
        JSONArray dataObjs;
        try {
            dataObjs = pageObj.getJSONObject("data").getJSONArray("list");
        } catch (Exception e) {
            dataObjs = pageObj.getJSONArray("data");
        }
        // BUGFIX: dataObjs can still be null when neither shape matches (previously NPE'd)
        if (dataObjs == null || dataObjs.isEmpty()){
            logger.info("page return no data");
            return parsedLinks;
        }
        crawlerRequestRecord.setNeedWashPage(true);
        // paging: a full page of 20 entries implies more data may follow
        if (dataObjs.size() == 20){
            JSONObject leastData = dataObjs.getJSONObject(dataObjs.size() - 1);
            String publicTime = leastData.getString("publicTime");
            if (StringUtils.isBlank(publicTime)){
                // BUGFIX: the fallback field lives on the last entry, not on the
                // "data" wrapper (which is null for the {"data":[...]} shape)
                publicTime = leastData.getString("publishTime");
            }
            HttpRequestBody requestBody = lastRequest.getRequestBody();
            byte[] bytesBody = requestBody.getBody();
            String decodedBody;
            try {
                // decode the form body explicitly as UTF-8 instead of the platform default charset
                decodedBody = URLDecoder.decode(new String(bytesBody, StandardCharsets.UTF_8), "UTF-8");
            } catch (UnsupportedEncodingException e) {
                throw new IllegalStateException("UTF-8 must be supported", e);
            }
            Map<String, Object> urlParams = getUrlParams(decodedBody);
            JSONObject bodyJson = JSONObject.parseObject(JSON.toJSONString(urlParams));
            // re-post the same body with page incremented by one
            int page = Integer.parseInt(bodyJson.getString(PAGE)) + 1;
            bodyJson.put(PAGE, String.valueOf(page));
            try {
                CrawlerRequestRecord nextRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(lastRequest.getUrl())
                        .recordKey(StringUtils.joinWith("-",lastRequest.getUrl(),System.currentTimeMillis()))
                        .releaseTime(DateUtils.parseDate(publicTime,"yyyy-MM-dd HH:mm:ss").getTime())
                        .copyBizTags()
                        .copyResultTags()
                        .notFilterRecord()
                        .needParsed(true)
                        .needWashed(false)
                        .build();
                HttpRequest httpRequest = nextRecord.getHttpRequest();
                httpRequest.setMethod(HttpConstant.Method.POST);
                httpRequest.addHeader(TOKEN_KEY,TOKEN);
                httpRequest.setRequestBody(HttpRequestBody.form(bodyJson.toJavaObject(Map.class),"utf-8"));
                crawlerRequestRecord.getHttpConfig().setResponseTextGenerateHtml(false);
                parsedLinks.add(nextRecord);
            } catch (ParseException e) {
                // BUGFIX: keep the stack trace, not only the message
                logger.error(e.getMessage(), e);
            }
        }
        return parsedLinks;
    }

    /**
     * Checks whether a downloaded page is complete and usable.
     *
     * @param crawlerRequestRecord the record whose request produced the page
     * @param httpPage             the downloaded page
     * @return {@code true} when the page is broken (non-200 status, failed
     *         download, empty body, or non-JSON body) and must not be washed
     */
    private boolean doHttpPageCheck(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        String url = crawlerRequestRecord.getHttpRequest().getUrl();
        int statusCode = httpPage.getStatusCode();
        if (statusCode != 200){
            logger.error("download page {} error, status code is {}",url,statusCode);
            return true;
        }
        if (!httpPage.isDownloadSuccess()){
            logger.error("download page failed, check your link {}",url);
            return true;
        }
        String rawText = httpPage.getRawText();
        if (StringUtils.isBlank(rawText)){
            logger.error("download page empty, check your link {}",url);
            return true;
        }
        try {
            // the API always answers JSON; anything else means a broken download
            JSONObject.parseObject(rawText);
            return false;
        }catch (Exception e){
            logger.error("page rawtext can't parse to JSON, check your link {}",url);
            return true;
        }
    }


    /**
     * Washes a verified page into data items, dispatching on the record's
     * result data type (article list vs. interaction back-fill).
     *
     * @param crawlerRequestRecord the record that produced {@code httpPage}
     * @param httpPage             the downloaded page
     * @return washed data items, empty when the page failed the health check
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> results = new ArrayList<>();
        if (doHttpPageCheck(crawlerRequestRecord, httpPage)) {
            return results;
        }
        CrawlerResultTags resultTags = crawlerRequestRecord.tagsCreator().resultTags();
        if (resultTags.hasDataType(article)) {
            results.addAll(washArticle(crawlerRequestRecord, httpPage));
        }
        if (resultTags.hasDataType(interaction)) {
            results.addAll(washInteraction(crawlerRequestRecord, httpPage));
        }
        return results;
    }

    /**
     * Converts an article list page ({"data":{"list":[...]}} or
     * {"data":[...]}) into {@link CrawlerData} items, one per entry whose URL
     * carries the __biz/mid identifiers.
     *
     * @param crawlerRequestRecord the record that produced {@code httpPage}
     * @param httpPage             downloaded JSON list page
     * @return washed data items, possibly empty
     */
    private List<CrawlerData> washArticle(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        JSONObject pageObj = JSONObject.parseObject(httpPage.getRawText());
        JSONArray dataObjs;
        try {
            dataObjs = pageObj.getJSONObject("data").getJSONArray("list");
        } catch (Exception e) {
            dataObjs = pageObj.getJSONArray("data");
        }
        // BUGFIX: both shapes may be absent, leaving dataObjs null (previously NPE'd)
        if (dataObjs == null){
            return crawlerDataList;
        }
        for (Object obj : dataObjs) {
            JSONObject dataObj = (JSONObject)obj;
            String articleUrl = dataObj.getString("url");
            // BUGFIX: a missing url previously NPE'd inside getUrlParams
            if (StringUtils.isBlank(articleUrl)){
                continue;
            }
            String publicTime = dataObj.getString("publicTime");
            if (StringUtils.isBlank(publicTime)){
                // the author endpoint names the field "publishTime"
                publicTime = dataObj.getString("publishTime");
            }
            Map<String, Object> urlParams = getUrlParams(articleUrl);
            if (null != urlParams){
                // __biz + mid together identify a WeChat article
                String biz = (String) urlParams.get("__biz");
                String mid = (String) urlParams.get("mid");

                String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");

                try {
                    CrawlerData crawlerData = CrawlerData.builder()
                            .data(crawlerRequestRecord, httpPage)
                            .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(),biz , mid))
                            .url(articleUrl)
                            .releaseTime(DateUtils.parseDate(publicTime,"yyyy-MM-dd HH:mm:ss").getTime())
                            .content(dataObj.toJSONString())
                            .resultLabelTag(article)
                            .build();
                    crawlerDataList.add(crawlerData);
                } catch (ParseException e) {
                    // BUGFIX: keep the stack trace, not only the message
                    logger.error(e.getMessage(), e);
                }
            }
        }
        return crawlerDataList;
    }

    /**
     * Converts an interaction back-fill list page into {@link CrawlerData}
     * items.
     * <p>
     * NOTE(review): results are labelled {@code article} and reuse the article
     * dataId (domain-site-article-biz-mid), which looks like a deliberate
     * back-fill that updates the previously stored article record — confirm
     * before changing the label.
     *
     * @param crawlerRequestRecord the record that produced {@code httpPage}
     * @param httpPage             downloaded JSON list page
     * @return washed data items, possibly empty
     */
    private List<CrawlerData> washInteraction(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        JSONObject pageObj = JSONObject.parseObject(httpPage.getRawText());
        // BUGFIX: tolerate both response shapes and a missing list, mirroring
        // washArticle (previously NPE'd on {"data":[...]} or absent data)
        JSONArray dataObjs;
        try {
            dataObjs = pageObj.getJSONObject("data").getJSONArray("list");
        } catch (Exception e) {
            dataObjs = pageObj.getJSONArray("data");
        }
        if (dataObjs == null){
            return crawlerDataList;
        }
        for (Object obj : dataObjs) {
            JSONObject dataObj = (JSONObject)obj;
            String articleUrl = dataObj.getString("url");
            // BUGFIX: a missing url previously NPE'd inside getUrlParams
            if (StringUtils.isBlank(articleUrl)){
                continue;
            }
            String publicTime = dataObj.getString("publicTime");
            if (StringUtils.isBlank(publicTime)){
                // BUGFIX: same "publishTime" fallback as washArticle; without it a
                // blank publicTime crashed the whole wash in DateUtils.parseDate
                publicTime = dataObj.getString("publishTime");
            }
            Map<String, Object> urlParams = getUrlParams(articleUrl);
            if (null != urlParams){
                String biz = (String) urlParams.get("__biz");
                String mid = (String) urlParams.get("mid");

                String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");

                try {
                    CrawlerData crawlerData = CrawlerData.builder()
                            .data(crawlerRequestRecord, httpPage)
                            .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(),biz , mid))
                            .url(articleUrl)
                            .releaseTime(DateUtils.parseDate(publicTime,"yyyy-MM-dd HH:mm:ss").getTime())
                            .content(dataObj.toJSONString())
                            .resultLabelTag(article)
                            .build();
                    crawlerDataList.add(crawlerData);
                } catch (ParseException e) {
                    // BUGFIX: keep the stack trace, not only the message
                    logger.error(e.getMessage(), e);
                }
            }
        }
        return crawlerDataList;
    }

    /**
     * Registers every URL pattern this script recognises: index page,
     * keyword search, author search and interaction back-fill.
     */
    @Override
    public void initUrlRegulars() {
        String[] regulars = {indexRegex, searchKWUrlRegex, searchAuthorUrlRegex, interactionUrlRegex};
        for (String regular : regulars) {
            addUrlRegular(regular);
        }
    }

    /**
     * Accepts only records whose biz "site" tag matches this script's site
     * ({@code apikw}).
     *
     * @param crawlerRequestRecord the candidate record
     * @return true when this script should handle the record
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag();
        String crawlerSite = categoryTag.getKVTagStrVal("site");
        // BUGFIX: compare from the non-null constant so a missing "site" tag
        // yields false instead of a NullPointerException
        return scriptSite.equalsIgnoreCase(crawlerSite);
    }

    /**
     * Hook invoked after a record has been executed; this script needs no
     * post-processing, so the body is intentionally empty.
     */
    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // intentionally empty: no per-record cleanup required
    }

    /**
     * @return the crawler domain identifier this script belongs to
     */
    @Override
    public String domain() {
        return "weixin";
    }

    /**
     * Converts the query-string portion of a URL (or a bare "a=1&amp;b=2"
     * form body) into a map.
     *
     * @param url e.g. http://x.y.com?aa=11&amp;bb=22&amp;cc=33, or a bare query string
     * @return parameter map; {@code null} when there is nothing after the "?"
     *         (callers use null as the "no parameters" signal)
     */
    private Map<String, Object> getUrlParams(String url) {
        String param = url;
        int qm = url.indexOf('?');
        if (qm >= 0){
            // BUGFIX: substring instead of split — split("\\?")[1] threw
            // ArrayIndexOutOfBoundsException for a URL ending in "?"
            param = url.substring(qm + 1);
        }
        if (StringUtils.isBlank(param)) {
            return null;
        }
        Map<String, Object> map = new HashMap<>();
        for (String pair : param.split("&")) {
            // BUGFIX: limit 2 keeps '=' characters inside the value intact
            // (previously such parameters were silently dropped)
            String[] kv = pair.split("=", 2);
            if (kv.length == 2) {
                map.put(kv[0], kv[1]);
            }
        }
        return map;
    }
}
