package com.chance.cc.crawler.development.scripts.qimai;

import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.downloader.HttpRequestBody;
import com.chance.cc.crawler.core.downloader.http.HttpClientDownloader;
import com.chance.cc.crawler.core.downloader.http.HttpClientGenerator;
import com.chance.cc.crawler.core.downloader.proxy.SiteConfigProxyProvider;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.development.scripts.allfeild.AICCommonField;
import com.chance.cc.crawler.development.scripts.sohu.article.SoHuArticleCrawlerScript;
import com.google.gson.JsonObject;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Timestamp;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;

/**
 * Crawler script for qimai.cn app-comment pages: seeds comment-list API
 * requests from iOS App Store and Android market listing pages, paginates
 * the iOS comment feed, and washes both feeds into {@code CrawlerData}.
 *
 * @author ding
 * @since 2021/9/6
 * @version 1.0
 **/
public class QimaiCrawlerScript extends CrawlerCommonScript {
    // BUGFIX: logger was created with SoHuArticleCrawlerScript.class (copy-paste),
    // which mis-attributed every log line from this script.
    private static final Logger log = LoggerFactory.getLogger(QimaiCrawlerScript.class);
    private static final String domain = "qimai";
    private static final String RECORD_AGAIN_REQUEST = "record_again_request";
    private static final Map<String,String> headMap = new HashMap<>();
    private static final Map<String,String> cookieMap = new HashMap<>();

    /** Regex for the iOS comment listing page on www.qimai.cn. */
    private static final String appUrl = "https://www.qimai.cn/app/comment/appid/\\S*/country/cn";
    /** Format template (NOT a regex) for the iOS comment API: analysis token, start date, end date. */
    private static final String appComment = "https://api.qimai.cn/app/comment?analysis=%s&appid=1570277888&country=cn&sword=&sdate=%s&edate=%s";
    /** Regex matching any iOS comment API URL. */
    private static final String appCommentUrl = "https://api.qimai.cn/app/comment\\S*";
    private static final String appCommentUrl1 = "https://api.qimai.cn/app/comment\\?analysis=\\S*";
    /** Regex for the Android comment listing page. */
    private static final String android = "https://www.qimai.cn/andapp/comment/appid/\\S*/market/\\S*";
    /** Format template for the Android comment API: analysis token. */
    private static final String androidComment = "https://api.qimai.cn/andapp/getCommentList?analysis=%s";
    /** Regex matching any Android comment API URL. */
    private static final String androidCommentUrl = "https://api.qimai.cn/andapp/getCommentList\\S*";

    /** Registers the URL patterns this script claims. */
    @Override
    public void initUrlRegulars() {
        addUrlRegular(appUrl);
        addUrlRegular(appCommentUrl);
        addUrlRegular(android);
        addUrlRegular(androidCommentUrl);
        addUrlRegular(appCommentUrl1);
    }

    /**
     * Routes a downloaded page to the matching parser and returns the
     * follow-up requests it produced.
     *
     * @param crawlerRecord the request record that produced {@code page}
     * @param page          the downloaded page
     * @return follow-up requests to schedule (possibly empty, never null)
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> parseLinks = new ArrayList<>();
        if (page.getStatusCode() != 200 || !page.isDownloadSuccess()) {
            // BUGFIX: the old message embedded a literal "{}" AND concatenated the
            // status code — broken SLF4J parameterization. Now properly parameterized.
            log.error("page download failed, statusCode = {}", page.getStatusCode());
            if (page.getStatusCode() == 402) {
                // 402 means the encrypted "analysis" parameter was generated wrongly; retrying is pointless.
                log.error("参数生成错误,程序终止");
                return parseLinks;
            }
            if (page.getStatusCode() != 404) {
                recordAgainRequest(crawlerRecord, parseLinks);
                crawlerRecord.setNeedWashPage(false);
                return parseLinks;
            }
            // NOTE(review): a 404 falls through to normal parsing below — confirm intended.
        }
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (url.matches(appUrl)) { // iOS listing page
            this.parseAppUrl(crawlerRecord, page, parseLinks);
        }
        // BUGFIX: was url.matches(appComment) — appComment is a String.format template
        // containing "%s" and an unescaped '?', so it could never match a real URL and
        // iOS pagination was dead code. Match the regex constant instead.
        if (url.matches(appCommentUrl)) {
            this.parsePageAppCommentUrl(crawlerRecord, page, parseLinks);
        }
        if (url.matches(android)) { // Android listing page
            this.parseAndroidUrl(crawlerRecord, page, parseLinks);
        }
        if (url.matches(androidCommentUrl)) {
            crawlerRecord.setNeedWashPage(true);
        }
        return parseLinks;
    }

    /**
     * Builds the POST request for the Android comment API from an Android
     * listing page. {@link #android(CrawlerRequestRecord)} both builds the
     * encryption payload and stashes appid/market/page into the biz tags
     * that are read back here.
     */
    private void parseAndroidUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String androidJson = android(crawlerRecord);
        String analysis = analysis(androidJson);
        String url = String.format(androidComment, analysis);
        String appid = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("appid");
        String market = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("market");
        String turnPage = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("page");
        Map<String,Object> body = new HashMap<>();
        body.put("appid", appid);
        body.put("market", market);
        body.put("page", turnPage);
        this.getTime(body); // adds keyword/start_date/end_date
        HttpRequestBody form = HttpRequestBody.form(body, "UTF-8");
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(url)
                .releaseTime(System.currentTimeMillis())
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .copyBizTags()
                .copyResultTags()
                .build();
        record.getHttpRequest().setMethod("post");
        record.getHttpRequest().setRequestBody(form);
        parseLinks.add(record);
    }

    /**
     * Fills {@code body} with the date-range form fields for the Android API:
     * start_date = midnight of the same calendar day one month ago (shifted by
     * +100000000 ms, i.e. ~28h — presumably a timezone/day-boundary fudge,
     * TODO confirm), end_date = 23:59:59 of "yesterday" by the same fudge.
     */
    private void getTime(Map<String, Object> body) {
        Calendar cal = Calendar.getInstance();
        // One month back; Calendar.add handles year rollover the same way the
        // original lenient Calendar.set(MONTH, month-1) did.
        cal.add(Calendar.MONTH, -1);
        Date day = cal.getTime();
        String str = new SimpleDateFormat("yyyy-MM-dd").format(day);
        try {
            long time = DateUtils.parseDate(str, "yyyy-MM-dd").getTime();
            time = time + 100_000_000L;
            // Timestamp.toString() is "yyyy-mm-dd hh:mm:ss.f"; substring(0,11)
            // keeps "yyyy-MM-dd " including the trailing space.
            String startDay = String.valueOf(new Timestamp(time)).substring(0, 11);
            String sdate = startDay + "00:00:00";
            long edate1 = System.currentTimeMillis() - 100_000_000L;
            String endDay = String.valueOf(new Timestamp(edate1)).substring(0, 11);
            String edate = endDay + "23:59:59";
            body.put("keyword", "");
            body.put("start_date", sdate);
            body.put("end_date", edate);
        } catch (ParseException e) {
            // Should be unreachable: the string was produced by the same pattern we parse with.
            log.error("failed to parse generated date string", e);
        }
    }

    /*
     * iOS comments: build the first comment-API request from the listing page.
     */
    private void parseAppUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String appStore = appStore();
        String analysis = analysis(appStore);
        String url = String.format(appComment, analysis, appsdate(), appedate());
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(url)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .needParsed(true)
                .needWashed(true)
                .copyResultTags()
                .build();
        parseLinks.add(record);
    }

    /*
     * iOS comments: schedule the next page once the current API response
     * confirms there are more pages.
     */
    private void parsePageAppCommentUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String code = page.getJson().jsonPath($_type + ".code").get();
        // Constant-first equals: jsonPath may return null on a malformed response.
        if (!"10000".equals(code)) {
            log.error("访问评论链接 参数错误");
            return;
        }
        String maxPage = page.getJson().jsonPath($_type + ".maxPage").get();
        if ("0".equals(maxPage)) { // no comments at all
            crawlerRecord.setNeedWashPage(false);
            return;
        }
        int lastPage = Integer.parseInt(maxPage);
        if (lastPage >= 2) {
            // Current page number from the biz tag; defaults to 1 when the tag
            // is absent (first API response) or unset. The old code NPE'd on a
            // missing tag.
            int current = 1;
            KVTag pageTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("page");
            if (pageTag != null && pageTag.getVal() instanceof Integer && (Integer) pageTag.getVal() != 0) {
                current = (Integer) pageTag.getVal();
            }
            int next = current + 1;
            if (next > lastPage) {
                return; // already scheduled the last page
            }
            String appStore = appStore();
            String analysis = analysis(appStore);
            String url = String.format(appComment, analysis, sdate(), edate());
            // BUGFIX: was url + "&page" + val + i — missing '=' and string
            // concatenation produced "...&page11" instead of "...&page=2".
            url = url + "&page=" + next;
            CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(url)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .needParsed(true)
                    .needWashed(true)
                    .copyResultTags()
                    .build();
            // BUGFIX: advance the page tag on the new record so pagination
            // actually progresses instead of re-requesting page 2 forever.
            record.tagsCreator().bizTags().addCustomKV("page", next);
            parseLinks.add(record);
        }
    }

    /*
     * Obtains the encrypted "analysis" query parameter by POSTing the request
     * description (JSON) to the local encryption helper service.
     *
     * NOTE(review): service address is a hard-coded LAN IP — should come from
     * configuration.
     */
    public String analysis(String json) {
        HttpClientDownloader downloader = new HttpClientDownloader();
        downloader.setClientGenerator(new HttpClientGenerator());
        downloader.setProxyProvider(new SiteConfigProxyProvider());
        HttpConfig httpConfig = HttpConfig.me(domain);
        HttpRequest httpRequest = new HttpRequest();
        httpRequest.setResponseCharset("UTF-8");
        httpRequest.setMethod("post");
        httpRequest.setUrl("http://192.168.1.210:8899/encrypt/qimai");
        Map<String,Object> bodyMap = new HashMap<>();
        bodyMap.put("params", json);
        HttpRequestBody form = HttpRequestBody.form(bodyMap, "UTF-8");
        httpRequest.setRequestBody(form);
        HttpPage download = downloader.download(httpRequest, httpConfig);
        return download.getRawText();
    }

    /**
     * Washes a comment-API response into {@link CrawlerData} items,
     * dispatching on whether the URL is the iOS or Android endpoint.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> listData = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (url.matches(appCommentUrl)) {
            this.washAppArticle(crawlerRecord, page, listData);
        }
        if (url.matches(androidCommentUrl)) {
            this.washAndroidArticle(crawlerRecord, page, listData);
        }
        return listData;
    }

    /** Extracts Android comments ({@code result[]}) into CrawlerData items. */
    private void washAndroidArticle(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> listData) {
        String code = page.getJson().jsonPath($_type + ".code").get();
        if (!"10000".equals(code)) {
            log.error("参数错误，访问失败");
            return;
        }
        List<String> all = page.getJson().jsonPath($_type + ".result").all();
        for (String s : all) {
            JSONObject jsonObject = JSONObject.parseObject(s);
            String comment_info = jsonObject.getString("comment_info"); // comment body
            String nick_name = jsonObject.getString("nick_name");       // author
            String rating = jsonObject.getString("rating");             // star rating
            String comment_time = jsonObject.getString("comment_time");
            String phone = jsonObject.getString("phone");               // device model
            long releaseTime = 0L;
            try {
                releaseTime = DateUtils.parseDate(comment_time, "yyyy-MM-dd HH:mm:ss").getTime();
            } catch (ParseException e) {
                log.error("unparseable comment_time = " + comment_time, e);
            }
            if (releaseTime == 0L) {
                releaseTime = System.currentTimeMillis(); // fall back to "now" when the source time is bad
            }
            if (!isDateRange(crawlerRecord, releaseTime)) {
                continue;
            }
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    // NOTE(review): Android comments carry no stable id upstream, so a random
                    // one is generated — duplicates/collisions are possible; confirm acceptable.
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, new Random().nextInt(10000000)))
                    .releaseTime(releaseTime)
                    .url(crawlerRecord.getHttpRequest().getUrl())
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                    .addContentKV(AICCommonField.Field_Author, nick_name)
                    .addContentKV(AICCommonField.Field_Content, comment_info)
                    .addContentKV(AICCommonField.Field_Score, rating)
                    .addContentKV(AICCommonField.Field_Equipment, phone)
                    .flowInPipelineTag("kafka")
                    .build();
            crawlerData.setFilterPipelineResult(true);
            listData.add(crawlerData);
        }
    }

    /** Extracts iOS comments ({@code appComments[]}) into CrawlerData items. */
    private void washAppArticle(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> listData) {
        List<String> all = page.getJson().jsonPath($_type + ".appComments").all();
        for (String s : all) {
            JSONObject jsonObject = JSONObject.parseObject(s);
            String id = jsonObject.getString("id");         // stable comment id
            String rating = jsonObject.getString("rating"); // star rating
            String date = jsonObject.getString("date");
            long releaseTime = 0L;
            try {
                releaseTime = DateUtils.parseDate(date, "yyyy-MM-dd HH:mm:ss").getTime();
            } catch (ParseException e) {
                log.error("unparseable comment date = " + date, e);
            }
            if (releaseTime == 0L) {
                releaseTime = System.currentTimeMillis();
            }
            JSONObject comment = jsonObject.getJSONObject("comment");
            String name = comment.getString("name");                     // author name
            String title = comment.getString("title");                   // comment title
            String body = comment.getString("body");                     // comment body
            String user_review_id = comment.getString("user_review_id"); // author id
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, id))
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                    .releaseTime(releaseTime)
                    .url(crawlerRecord.getHttpRequest().getUrl())
                    .addContentKV(AICCommonField.Field_Author, name)
                    .addContentKV(AICCommonField.Field_Content, body)
                    .addContentKV(AICCommonField.Field_Title, title)
                    .addContentKV(AICCommonField.Field_Author_Id, user_review_id)
                    .addContentKV(AICCommonField.Field_Score, rating)
                    .flowInPipelineTag("kafka")
                    .build();
            crawlerData.setFilterPipelineResult(true);
            listData.add(crawlerData);
        }
    }

    /** All records are accepted; no pre-crawl filtering for this site. */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        return true;
    }

    /** No post-execution cleanup required. */
    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // intentionally empty
    }

    @Override
    public String domain() {
        return domain;
    }

    /**
     * Requeues a failed request, tracking the retry count in a biz tag and
     * giving up after 10 attempts.
     */
    private static void recordAgainRequest(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> parseList) {
        int count;
        String url = crawlerRequestRecord.getHttpRequest().getUrl();
        if (crawlerRequestRecord.tagsCreator().bizTags().hasKVTag(RECORD_AGAIN_REQUEST)) {
            count = Integer.valueOf(crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(RECORD_AGAIN_REQUEST));
            if (count >= 10) {
                log.error("url excessive number of repeated downloads this url = " + url);
                // BUGFIX: previously only logged and fell through, requeueing forever.
                return;
            }
        } else {
            count = 1;
        }
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .releaseTime(System.currentTimeMillis())
                .httpUrl(url)
                .recordKey(crawlerRequestRecord.getRecordKey() + count)
                .copyResultTags()
                .copyBizTags()
                .build();

        record.getHttpRequest().setCookies(crawlerRequestRecord.getHttpRequest().getCookies());
        record.getHttpRequest().setExtras(crawlerRequestRecord.getHttpRequest().getExtras());
        record.getHttpRequest().setHeaders(crawlerRequestRecord.getHttpRequest().getHeaders());
        record.setNeedParsedPage(crawlerRequestRecord.isNeedParsedPage());
        record.setNeedWashPage(crawlerRequestRecord.isNeedWashPage());
        // BUGFIX: was count++ (postfix), which stored the OLD value — the counter
        // never advanced, so the 10-retry cap above was never reached.
        record.tagsCreator().bizTags().addCustomKV(RECORD_AGAIN_REQUEST, count + 1);
        parseList.add(record);
    }

    static {
        headMap.put("Date", "Mon, 06 Sep 2021 06:15:00 GMT");
        headMap.put("Content-Type", "application/json; charset=utf-8");
        headMap.put("Transfer-Encoding", "chunked");
        headMap.put("Connection", "keep-alive");
        // NOTE(review): two puts under the same "Set-Cookie" key — a HashMap keeps
        // only the second (PHPSESSID) value; the tgw_l7_route line below is a no-op.
        // Confirm whether both cookies were meant to be sent (cookieMap already
        // carries both) before merging or removing.
        headMap.put("Set-Cookie", "tgw_l7_route=29ef178f2e0a875a4327cbfe5fbcff7e; Expires=Mon, 06-Sep-2021 06:15:30 GMT; Path=/");
        headMap.put("Set-Cookie", "PHPSESSID=3tnbk0khaac5iiu96m7cbdi9a7; path=/; domain=qimai.cn");
        headMap.put("Server", "openresty/1.15.8.3");
        headMap.put("X-Powered-By", "PHP/7.2.33");
        headMap.put("Access-Control-Expose-Headers", "Server-U, Server-Time");
        headMap.put("Access-Control-Allow-Credentials", "true");
        headMap.put("Access-Control-Allow-Origin", "https://www.qimai.cn");
        headMap.put("Expires", "Thu, 19 Nov 1981 08:52:00 GMT");
        headMap.put("Cache-Control", "no-store, no-cache, must-revalidate");
        headMap.put("Pragma", "no-cache");
        headMap.put("Server-Time", "1630908900000");
        headMap.put("Content-Encoding", "gzip");
        // "synct" is the epoch time with a '.' inserted before the millisecond part.
        cookieMap.put("synct", String.valueOf(new StringBuffer(String.valueOf(System.currentTimeMillis())).insert(10, ".")));
        cookieMap.put("PHPSESSID", "3tnbk0khaac5iiu96m7cbdi9a7");
        cookieMap.put("tgw_l7_route", "29ef178f2e0a875a4327cbfe5fbcff7e");
    }

    /**
     * Start date for the iOS comment URL: "yyyy-MM-dd 00:00:00" of the day
     * ~28h ago (now - 100000000 ms), URL-encoded (' ' -> %20).
     */
    public String appsdate() {
        long current = System.currentTimeMillis() - 100000000;
        // Timestamp.toString() is "yyyy-mm-dd hh:mm:ss.f"; substring(0,11) keeps "yyyy-MM-dd ".
        String date = String.valueOf(new Timestamp(current)).substring(0, 11);
        return (date + "00:00:00").replaceAll(" ", "%20");
    }

    /**
     * End date for the iOS comment URL: "yyyy-MM-dd 23:59:59" of the day
     * ~28h ago, URL-encoded.
     */
    public String appedate() {
        long current = System.currentTimeMillis() - 100000000;
        String date = String.valueOf(new Timestamp(current)).substring(0, 11);
        return (date + "23:59:59").replaceAll(" ", "%20");
    }

    /**
     * Start date for pagination requests: today's "yyyy-MM-dd 00:00:00"
     * (midnight computed via epoch-day truncation + zone offset), URL-encoded.
     */
    public String sdate() {
        long current = System.currentTimeMillis();
        long zero = current / (1000 * 3600 * 24) * (1000 * 3600 * 24) - TimeZone.getDefault().getRawOffset();
        String date = String.valueOf(new Timestamp(zero)).substring(0, 11);
        return (date + "00:00:00").replaceAll(" ", "%20");
    }

    /**
     * End date for pagination requests: today's "yyyy-MM-dd 23:59:59", URL-encoded.
     */
    public String edate() {
        long current = System.currentTimeMillis();
        long zero = current / (1000 * 3600 * 24) * (1000 * 3600 * 24) - TimeZone.getDefault().getRawOffset();
        String date = String.valueOf(new Timestamp(zero)).substring(0, 11);
        return (date + "23:59:59").replaceAll(" ", "%20");
    }

    /**
     * Builds the JSON payload sent to the encryption service for the iOS
     * comment endpoint (baseUrl/url plus query params, with dates NOT
     * URL-encoded — the encryptor expects raw values).
     *
     * NOTE(review): appid is hard-coded here and in the appComment template.
     */
    public String appStore() {
        JsonObject e = new JsonObject();
        JsonObject params = new JsonObject();
        long current = System.currentTimeMillis() - 100000000;
        String date = String.valueOf(new Timestamp(current)).substring(0, 11);
        String sdate = date + "00:00:00";
        String edate = date + "23:59:59";
        e.addProperty("baseUrl", "https://api.qimai.cn");
        e.addProperty("url", "/app/comment");
        params.addProperty("appid", "1570277888");
        params.addProperty("country", "cn");
        params.addProperty("sword", "");
        params.addProperty("sdate", sdate);
        params.addProperty("edate", edate);
        e.add("params", params);
        return e.toString();
    }

    /*
     * Android: build the encryption payload from a listing URL of the form
     * .../appid/{appid}/market/{market}, and stash appid/market/page into
     * the record's biz tags for parseAndroidUrl to read back.
     */
    public String android(CrawlerRequestRecord crawlerRecord) {
        String url = crawlerRecord.getHttpRequest().getUrl();
        String[] split = url.split("/");
        String market = split[split.length - 1];
        String appid = split[split.length - 3];
        JsonObject e = new JsonObject();
        JsonObject params = new JsonObject();
        e.addProperty("baseUrl", "https://api.qimai.cn");
        e.addProperty("url", "/andapp/getCommentList");
        params.addProperty("appid", appid);
        params.addProperty("market", market);
        e.add("params", params);
        crawlerRecord.tagsCreator().bizTags().addCustomKV("market", market);
        crawlerRecord.tagsCreator().bizTags().addCustomKV("appid", appid);
        crawlerRecord.tagsCreator().bizTags().addCustomKV("page", "1");
        return e.toString();
    }

    /**
     * Checks whether a release time falls inside the record's date-range
     * filter. Records without a date-range filter always pass.
     *
     * @param crawlerRequestRecord the record carrying filter configuration
     * @param releaseTimeToLong    epoch millis of the item being filtered
     * @return true when the time is in range (or no range filter applies)
     */
    private boolean isDateRange(CrawlerRequestRecord crawlerRequestRecord, Long releaseTimeToLong) {
        boolean isRange = false;
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        if (filter == CrawlerEnum.CrawlerRecordFilter.keyOrDateRange || filter == CrawlerEnum.CrawlerRecordFilter.dateRange) {
            List<FilterInfo> filterInfos = crawlerRequestRecord.getFilterInfos();
            Long startTime = null;
            Long endTime = null;
            for (FilterInfo filterInfo : filterInfos) {
                if (filterInfo.getFilter() == CrawlerEnum.CrawlerRecordFilter.dateRange) {
                    long[] dateAllowRange = filterInfo.getDateAllowRange();
                    int hourFromNow = filterInfo.getHourFromNow();
                    if (dateAllowRange != null) {
                        startTime = dateAllowRange[0];
                        endTime = dateAllowRange[1];
                    } else if (hourFromNow != 0) {
                        endTime = System.currentTimeMillis() - 60000; // one minute of slack
                        startTime = endTime - DateUtils.MILLIS_PER_HOUR * hourFromNow;
                    }
                }
            }
            if (startTime != null && releaseTimeToLong != 0 && startTime <= releaseTimeToLong && releaseTimeToLong <= endTime) {
                isRange = true;
            }
        } else {
            isRange = true;
        }
        return isRange;
    }

}
