package com.chance.cc.crawler.development.scripts.xiaohongshu.monitor;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.*;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.record.CrawlerResultRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.*;

/**
 * @author bx
 * @date 2021/1/8 0008 12:11
 */
public class XHSNodeMonitorScript extends CrawlerCommonScript {

    private static final Logger logger = LoggerFactory.getLogger(XHSNodeMonitorScript.class);

    // NOTE(review): "Ulr" typo kept — the constant is public and may be referenced elsewhere.
    public static final String monitorUlr = "/node/xhs-monitor";
    // Node log file to tail; %s is the current date (yyyy-MM-dd).
    public static final String nodeMonitorFilePathFormat = "/data/chance_crawler_test/logs/node/data-node-node-%s.all";
//    local dev alternative: "D:\\chance\\log\\boostrap\\data-node-bootstrap-%s.all"
    // Signal file that records the byte offset of the log already scanned; %s is the current date.
    public static final String signalFilePathFormat = "/data/chance_crawler_test/signal/node-log-size-%s.txt";
//    local dev alternative: "D:\\chance\\log\\signal\\node-log-size-%s.txt"
    // Mail service endpoint; %s is the alert message content.
    public static final String sendEmailUrl = "http://192.168.1.213:9030/api/chance/mail?type=xhs&content=%s";
    public static final String sendContent = "小红书小程序需要滑动验证码";

    // Timestamp (ms) of the last alert mail, used to rate-limit off-hours alerts.
    public static long lastSendTime = 0;
    public static int downloadRetryCount = 4;
    public static int downloadRetryInterval = 1000;

    /**
     * Tails today's node log from the offset persisted in the signal file, counts
     * "Verification code appears" lines, and when the count exceeds the record's
     * {@code error_count} tag tries to auto-pass the captcha. If auto-verification
     * fails, an alert e-mail is sent; between 21:00 and 09:00 alerts are rate-limited
     * to one every two hours.
     *
     * @param context the current crawl record context (supplies the downloader and tags)
     * @return always {@code null}; this monitor script never produces follow-up requests
     */
    public List<CrawlerRequestRecord> parseLinks(CrawlerRecordContext context) {
        try {
            long currentTimeMillis = System.currentTimeMillis();
            String currentDate = DateFormatUtils.format(currentTimeMillis, "yyyy-MM-dd");
            String nodeMonitorFilePath = String.format(nodeMonitorFilePathFormat, currentDate);
            String signalFilePath = String.format(signalFilePathFormat, currentDate);
            Downloader pageDownloader = context.getPageDownloader();

            long lastTimeFileSize = loadLastFileSize(signalFilePath, nodeMonitorFilePath);

            int count = 0;
            // try-with-resources: the original leaked the RandomAccessFile handle
            try (RandomAccessFile randomFile = new RandomAccessFile(nodeMonitorFilePath, "rw")) {
                randomFile.seek(lastTimeFileSize);
                String line;
                while ((line = randomFile.readLine()) != null) {
                    if (line.contains("Verification code appears")) {
                        count++;
                    }
                }
                lastTimeFileSize = randomFile.length();
            }
            saveLastFileSize(signalFilePath, lastTimeFileSize);
            logger.info("node log path {} tail random file length {}", nodeMonitorFilePath, lastTimeFileSize);

            String errorCount = context.getCrawlerRecord().tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("error_count");
            // if more than ${error_count} captcha prompts appeared since the last scan, alert
            if (count > Integer.parseInt(errorCount)) {
                // first try to pass the captcha automatically; mail only on failure
                if (!checkVerify(pageDownloader)) {
                    long endTime = System.currentTimeMillis();
                    int hour = new GregorianCalendar().get(Calendar.HOUR_OF_DAY);
                    if (hour >= 9 && hour <= 21) {
                        // working hours: always alert
                        sendAlertEmail(pageDownloader);
                        lastSendTime = endTime;
                    } else if ((endTime - lastSendTime) > 2 * 60 * 60 * 1000L) {
                        // off hours: at most one mail every 2 hours
                        sendAlertEmail(pageDownloader);
                        lastSendTime = endTime;
                    } else {
                        logger.info("有验证码但是不发送邮件！");
                    }
                }
            }
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
        return null;
    }

    /**
     * Reads the last scanned byte offset from the signal file, initialising it from
     * the current log length on the first run of the day.
     *
     * @throws IOException if either file cannot be read or written
     */
    private long loadLastFileSize(String signalFilePath, String nodeMonitorFilePath) throws IOException {
        File signalFile = new File(signalFilePath);
        if (!signalFile.exists()) {
            // first run for this date: start tailing from the current end of the log
            long initialSize = new File(nodeMonitorFilePath).length();
            saveLastFileSize(signalFilePath, initialSize);
            logger.info("node log path {} init random file length {}", nodeMonitorFilePath, initialSize);
            return initialSize;
        }
        List<String> nodeLogSize;
        try (FileInputStream in = new FileInputStream(signalFilePath)) {
            nodeLogSize = IOUtils.readLines(in, "utf-8");
        }
        long lastTimeFileSize = 0;
        if (nodeLogSize.size() > 0) {
            lastTimeFileSize = Long.parseLong(nodeLogSize.get(0));
        } else {
            // corrupt/empty signal file: fall back to offset 0 and re-scan the whole log
            logger.error("node log size file {} is error!", signalFilePath);
        }
        logger.info("node log path {} last time file size {}", nodeMonitorFilePath, lastTimeFileSize);
        return lastTimeFileSize;
    }

    /** Persists the scanned offset so the next run continues where this one stopped. */
    private void saveLastFileSize(String signalFilePath, long size) throws IOException {
        try (FileOutputStream out = new FileOutputStream(signalFilePath)) {
            IOUtils.write(String.valueOf(size), out, "utf-8");
        }
    }

    /** Fires the alert-mail request and logs the mail service's response body. */
    private void sendAlertEmail(Downloader pageDownloader) {
        HttpRequest httpRequest = new HttpRequest();
        httpRequest.setUrl(String.format(sendEmailUrl, sendContent));
        HttpPage download = pageDownloader.download(httpRequest, HttpConfig.me(domain()));
        logger.info("email sent result info {}", download.getRawText());
    }

    /**
     * Asks the verification service whether the captcha can be passed automatically,
     * retrying up to {@link #downloadRetryCount} times with {@link #downloadRetryInterval}
     * ms between attempts. On a "PASS" risk level the returned rid is submitted.
     *
     * @return {@code true} when the captcha was passed and the rid was accepted
     */
    private boolean checkVerify(Downloader pageDownloader) {
        HttpRequest httpRequest = new HttpRequest();
        httpRequest.setUrl("http://192.168.1.212:9310/xhs/baidu/fverify");
        for (int retry = 1; retry <= downloadRetryCount; retry++) {
            HttpPage httpPage = pageDownloader.download(httpRequest, HttpConfig.me(domain()));
            if (httpPage.isDownloadSuccess()) {
                try {
                    String riskLevel = httpPage.getJson().jsonPath("riskLevel").get();
                    logger.info("check verify download response:{}", httpPage.getRawText());
                    if ("PASS".equals(riskLevel)) { // passed verification -> submit the rid
                        String rid = httpPage.getJson().jsonPath("rid").get();
                        return submitRid(rid, pageDownloader);
                    }
                } catch (Exception e) {
                    logger.error("Verification code verification failed: {}", e.getMessage());
                }
            }
            try {
                Thread.sleep(downloadRetryInterval);
            } catch (InterruptedException e) {
                // restore the interrupt flag and stop retrying instead of swallowing it
                Thread.currentThread().interrupt();
                break;
            }
        }
        logger.error("check verify retry Limit exceeded!");
        return false;
    }

    /**
     * Submits a passed captcha rid back to the XiaoHongShu shield endpoint.
     *
     * @return {@code true} when the response reports {@code data.passed == "true"}
     */
    private boolean submitRid(String rid, Downloader pageDownloader) {
        HttpRequest httpRequest = new HttpRequest();
        httpRequest.setUrl("https://www.xiaohongshu.com/fe_api/burdock/baidu/v2/shield/captcha");
        httpRequest.addHeader("X-Sign", "X3dbc57a720d5b19dbca330d39e2b474e");
        httpRequest.addHeader("Referer", "https://smartapps.cn/KuRdr9OR39BqyAGIg7mYK7Bytityu0Vi/2.35.20/page-frame.html");
        httpRequest.addHeader("Content-Type", "application/json; charset=utf-8");
        httpRequest.addHeader("Host", "www.xiaohongshu.com");
        httpRequest.setMethod("POST");

        String body = "{\"rid\":\"" + rid + "\",\"status\":1}";
        httpRequest.setRequestBody(HttpRequestBody.json(body, "utf-8"));
        HttpPage httpPage = pageDownloader.download(httpRequest, HttpConfig.me("submit_rid"));
        if (httpPage.isDownloadSuccess()) {
            try {
                String passed = httpPage.getJson().jsonPath("data.passed").get();
                if ("true".equals(passed)) {
                    return true;
                }
            } catch (Exception e) {
                logger.error("submit rid fail,error:[{}]]", e.getMessage());
            }
        }
        logger.error("submit rid error response:[{}]", httpPage.getRawText());
        return false;
    }

    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        // monitoring happens in parseLinks; nothing to wash
        return null;
    }

    @Override
    public void initUrlRegulars() {
        addUrlRegular(monitorUlr);
    }

    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        return true;
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // no post-processing required for this monitor
    }

    @Override
    public String domain() {
        return "xhs-monitor";
    }

    /**
     * Overrides crawlerProcess to support the context-based download flow.
     *
     * @param context the current crawl record context
     */
    @Override
    public void crawlerProcess(CrawlerRecordContext context) {
        //parse request list
        if (context.getCrawlerRecord().isNeedParsedPage()) {
            parsePage(context);
        }

        //need wash request
        if (context.getCrawlerRecord().isNeedWashPage()) {
            washResult(context);
        }
    }

    /** Parses links, performs internal downloads, and queues the remaining links on the context. */
    private void parsePage(CrawlerRecordContext context) {
        //parse page links
        List<CrawlerRequestRecord> links = parseLinks(context);
        //internal download (null-safe: this monitor's parseLinks always returns null)
        internalDownload(context, links);
        //after internal download
        if (context.hasInternalDownloadLinks()) {
            afterInternalDownload(context.getCrawlerRecord(), context.getInternalDownloadLinks(), links);
        }
        //add to context parsed links
        if (links != null && links.size() > 0) {
            for (CrawlerRequestRecord requestRecord : links) {
                context.addCrawlerRecord(requestRecord);
            }
        } else {
            logger.warn("record [{}] parsed links is null!", context.getCrawlerRecord());
        }
    }

    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        // unused: link parsing is driven by the context overload above
        return null;
    }

    /**
     * Splits internal-download links out of {@code links}, downloads them immediately,
     * and stores them on the context. Fix: guard against a null {@code links} list,
     * which the original dereferenced unconditionally.
     */
    private void internalDownload(CrawlerRecordContext context, List<CrawlerRequestRecord> links) {
        if (links == null || links.isEmpty()) {
            return;
        }
        //extract internal download links
        List<CrawlerRequestRecord> internalDownloadLinks = new ArrayList<>();
        for (CrawlerRequestRecord requestRecord : links) {
            if (requestRecord.tagsCreator().requestTags().hasRequestType(CrawlerEnum.CrawlerRequestType.internalDownload)) {
                internalDownloadLinks.add(requestRecord);
            }
        }
        if (internalDownloadLinks.isEmpty()) {
            return;
        }
        links.removeAll(internalDownloadLinks);
        //download links
        for (CrawlerRequestRecord requestRecord : internalDownloadLinks) {
            HttpPage page = context.getPageDownloader()
                    .download(requestRecord.getHttpRequest(), requestRecord.getHttpConfig());
            requestRecord.setInternalDownloadPage(page);
        }
        //set to context
        context.setInternalDownloadLinks(internalDownloadLinks);
    }

    /** Washes the downloaded page into result records and attaches them to the context. */
    private void washResult(CrawlerRecordContext context) {
        CrawlerRequestRecord crawlerRecord = context.getCrawlerRecord();
        HttpPage page = context.getPage();

        List<CrawlerResultRecord> requestResults = new ArrayList<>();
        List<CrawlerData> crawlerDataList = washPage(crawlerRecord, page);
        if (crawlerDataList != null && crawlerDataList.size() > 0) {
            for (CrawlerData crawlerData : crawlerDataList) {
                //build the result record
                CrawlerResultRecord crawlerResultRecord = new CrawlerResultRecord();
                if (crawlerData.getFilter() != null) {
                    //filter info
                    crawlerResultRecord.setFilter(crawlerData.getFilter());
                    crawlerResultRecord.setFilterInfos(crawlerData.getFilterInfos());
                }
                crawlerResultRecord.setRecordKey(crawlerData.getDataId());
                crawlerResultRecord.setReleaseTime(crawlerData.getReleaseTime());
                //request tags
                crawlerResultRecord.setTags(crawlerData.getTags());
                //carry over the filter-pipeline flag
                crawlerResultRecord.setFilterPipelineResult(crawlerData.isFilterPipelineResult());
                crawlerResultRecord.setJsonStr(JSON.toJSONString(crawlerData));
                requestResults.add(crawlerResultRecord);
            }
        }
        context.setRequestResults(requestResults);
    }
}
