package com.appkubes.fintech.admin.application.risk.data.xingpan.service.impl;

import com.appkubes.fintech.admin.application.risk.data.xingpan.Constants;
import com.appkubes.fintech.admin.application.risk.data.xingpan.JsonUtil;
import com.appkubes.fintech.admin.application.risk.data.xingpan.dao.RiskXingPanOtherDao;
import com.appkubes.fintech.admin.application.risk.data.xingpan.http.HttpService;
import com.appkubes.fintech.admin.application.risk.data.xingpan.service.XingPanRiskOtherService;
import com.appkubes.fintech.admin.application.user.dao.UserBaseInfoDao;
import com.appkubes.fintech.admin.application.user.dao.UserDao;
import com.appkubes.fintech.common.response.Response;
import com.appkubes.fintech.core.po.FintechRiskXingpangOtherWithBLOBs;
import com.appkubes.fintech.core.po.FintechUser;
import com.appkubes.fintech.core.po.FintechUserBaseInfo;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * XingPan ("星盘") third-party risk-data service implementation.
 *
 * <p>Starts a remote crawler for a user's mobile-operator data (call records
 * and operator report), appends verification data (SMS / image captcha codes)
 * the crawler asks for, polls the crawler's status, and exposes the cached
 * crawl results.
 *
 * <p>Date: 2020/5/4 17:10
 *
 * @author Reboot65
 * @version 1.0
 **/
@Service
@Slf4j
public class XingPanRiskOtherServiceImpl implements XingPanRiskOtherService {

    @Resource
    private RiskXingPanOtherDao riskXingPanOtherDao;

    @Resource
    private UserDao userDao;

    @Resource
    private UserBaseInfoDao userBaseInfoDao;

    @Resource
    private AsyncCrawler asyncCrawler;

    /**
     * Returns the cached operator call-record result for the user.
     *
     * @param userKey primary key of the user
     * @return success carrying the raw result string, or failure when no
     *         record (or an empty result) exists for the key
     */
    @Override
    public Response getOperator(String userKey) {
        FintechRiskXingpangOtherWithBLOBs xingpan = riskXingPanOtherDao.selectByPrimaryKey(userKey);
        if (xingpan == null || xingpan.getOperatorresult() == null || xingpan.getOperatorresult().isEmpty()) {
            return Response.fail("找不到该用户数据");
        }
        return Response.success(xingpan.getOperatorresult());
    }

    /**
     * Returns the cached operator report result for the user.
     *
     * @param userKey primary key of the user
     * @return success carrying the raw report string, or failure when no
     *         record (or an empty report) exists for the key
     */
    @Override
    public Response getOperatorReport(String userKey) {
        FintechRiskXingpangOtherWithBLOBs xingpan = riskXingPanOtherDao.selectByPrimaryKey(userKey);
        if (xingpan == null || xingpan.getOperatorreportresult() == null || xingpan.getOperatorreportresult().isEmpty()) {
            return Response.fail("找不到该用户数据");
        }
        return Response.success(xingpan.getOperatorreportresult());
    }

    /**
     * Kicks off a crawl of the user's operator data (call records + report).
     *
     * <p>Creates the persistence row on first use, starts the remote crawler
     * when no result has been cached yet, stores the crawler id/token, and
     * then waits (via {@link #getCrawlerMsg}) for the first status result.
     *
     * @param userKey primary key of the user
     * @param phone   the user's phone number used for the operator login
     * @param pass    the operator account password
     * @return success when the crawl completed (or was already cached);
     *         otherwise the failure / append-data response from polling
     */
    @Override
    public Response crawlerData(String userKey, String phone, String pass) {
        FintechUser user = userDao.selectByPrimaryKey(userKey);
        if (user == null) {
            return Response.fail("无该用户");
        }

        FintechUserBaseInfo baseInfo = userBaseInfoDao.selectByPrimaryKey(userKey);
        if (baseInfo == null) {
            return Response.fail("无该用户");
        }

        FintechRiskXingpangOtherWithBLOBs riskXingpang = riskXingPanOtherDao.selectByPrimaryKey(userKey);
        if (riskXingpang == null) {
            // First crawl for this user: create the row up front so the
            // crawler id/token can be attached to it below.
            riskXingpang = new FintechRiskXingpangOtherWithBLOBs();
            riskXingpang.setCreateTime(new Date());
            riskXingpang.setPhone(phone);
            riskXingpang.setUserKey(userKey);
            riskXingpang.setPassword(pass);
            riskXingPanOtherDao.insertSelective(riskXingpang);
        }

        // Fetch call records and the operator report only when not cached yet.
        String strResult = riskXingpang.getOperatorresult();
        if (strResult == null || strResult.isEmpty()) {
            Map map = asyncCrawler.startCrawler(baseInfo, phone, pass, Constants.CrawlerType_OperatorReport);
            String operatorCrawlerId = (String) map.get("crawlerId");
            String operatorCrawlerToken = (String) map.get("crawlerToken");

            riskXingpang.setCrawlerid(operatorCrawlerId);
            riskXingpang.setCrawlertoken(operatorCrawlerToken);
            // NOTE(review): non-selective update rewrites the full row; fields
            // not set on this freshly loaded/created object would be nulled —
            // confirm updateByPrimaryKeySelective isn't intended here.
            riskXingPanOtherDao.updateByPrimaryKey(riskXingpang);

            Response response = getCrawlerMsg(userKey, operatorCrawlerId, operatorCrawlerToken, true);
            if (response != null) {
                return response;
            }
        }
        return Response.success();
    }

    /**
     * Appends verification data (e.g. an SMS or image captcha code) to a
     * crawler that is waiting for it, then resumes polling.
     *
     * @param userKey      primary key of the user
     * @param name         the append-data field name the crawler asked for
     * @param code         the verification code entered by the user
     * @param crawlerId    id of the waiting crawler instance
     * @param crawlerToken token of the waiting crawler instance
     * @return success when the crawl completed, otherwise the failure /
     *         append-data response from polling
     */
    @Override
    public Response appendData(String userKey, String name, String code, String crawlerId, String crawlerToken) {
        FintechUserBaseInfo baseInfo = userBaseInfoDao.selectByPrimaryKey(userKey);
        if (baseInfo == null) {
            return Response.fail("无该用户");
        }

        Map<String, String> appendData = new HashMap<>();
        appendData.put(name, code);
        HttpService httpService = HttpService.newInstance();
        Map map = asyncCrawler.operateCrawler(httpService, Constants.CRAWLER_OPERATE_METHOD_CrawlerAppendData, crawlerId, crawlerToken, JsonUtil.toJson(appendData));
        if (!Constants.SUCCESS_CODE.equals(map.get("code"))) {
            log.error("追加数据失败：{}", map.get("message"));
            return Response.fail("追加数据失败：" + map.get("message"));
        }
        Response response = getCrawlerMsg(userKey, crawlerId, crawlerToken, true);
        if (response == null) {
            return Response.success();
        }
        return response;
    }

    /**
     * Polls the remote crawler until it reaches a terminal state.
     *
     * <p>Possible outcomes:
     * <ul>
     *   <li>Success — results are cached via the async crawler and
     *       {@code null} is returned (callers translate that to success).</li>
     *   <li>Failure / unknown status — a failure {@link Response}.</li>
     *   <li>Still crawling — when {@code isFirst}, waits up to ~30s (6 polls
     *       of 5s) before giving up; otherwise hands off to an async waiter
     *       and returns success immediately.</li>
     *   <li>Waiting for append data — returns a success {@link Response}
     *       whose payload describes the code the client must supply.</li>
     * </ul>
     *
     * @param userKey      primary key of the user
     * @param crawlerId    id of the crawler instance being polled
     * @param crawlerToken token of the crawler instance being polled
     * @param isFirst      whether this is the initial, synchronous wait
     * @return {@code null} on crawl success; otherwise a {@link Response}
     */
    public Response getCrawlerMsg(String userKey, String crawlerId, String crawlerToken, boolean isFirst) {
        HttpService httpService = HttpService.newInstance();
        int waitCount = 0;
        while (true) {
            asyncCrawler.sleep(5000L); // wait 5s before each status poll
            Map map = asyncCrawler.operateCrawler(httpService, Constants.CRAWLER_OPERATE_METHOD_CrawlerGetInfo, crawlerId, crawlerToken, null);
            if (!Constants.SUCCESS_CODE.equals(map.get("code"))) {
                // BUGFIX: placeholder was missing, so the message argument was
                // silently dropped by SLF4J.
                log.error("获取爬虫信息失败：{}", map.get("message"));
                return Response.fail("获取信息失败");
            }
            Map dataMap = (Map) map.get("data");
            String status = (String) dataMap.get("status"); // crawler run state
            if (Constants.CRAWLER_STATUS_Success.equals(status)) {
                log.info("数据爬取成功");
                // Crawl finished — fetch both result payloads and cache them.
                String operatorReportData = asyncCrawler.operateCrawlerWithStr(httpService, Constants.CRAWLER_OPERATE_METHOD_CrawlerGetData, crawlerId, crawlerToken, null);
                String operatorData = asyncCrawler.operateCrawlerWithStr(httpService, Constants.CRAWLER_OPERATE_METHOD_CrawlerGetOriginalData, crawlerId, crawlerToken, null);
                log.info("数据爬取成功,运营商报告={}", operatorReportData);
                log.info("数据爬取成功,通话记录={}", operatorData);
                asyncCrawler.CacheOtherCrawlerData(userKey, operatorData, operatorReportData);
                return null;
            } else if (Constants.CRAWLER_STATUS_Failure.equals(status)) {
                log.warn("数据爬取失败：{}", dataMap.get("message"));
                return Response.fail("获取信息失败:" + dataMap.get("message"));
            } else if (Constants.CRAWLER_STATUS_Crawling.equals(status)) {
                waitCount++;
                // On the first launch we wait synchronously for the result,
                // but for at most 30 seconds (6 polls x 5s).
                if (isFirst) {
                    if (waitCount <= 5) {
                        continue;
                    } else {
                        log.info("爬虫运行时间过长，强制停止");
                        return Response.fail("爬虫运行时间过长，强制停止");
                    }
                } else {
                    log.info("爬虫爬行中，启动异步线程");
                    asyncCrawler.WaitingCrawler(userKey, crawlerId, crawlerToken);
                    // "crawling, please wait"
                    return Response.success();
                }
            } else if (Constants.CRAWLER_STATUS_WaitAppendData.equals(status)) {
                // Crawler is blocked on user input (SMS / image captcha);
                // build a payload describing what the client must submit.
                List<Map> messages = (List<Map>) dataMap.get("message");
                Map<String, String> appendData = new HashMap<>();

                messages.forEach(e -> {
                    String name = (String) e.get("name");
                    StringBuilder promptBuilder = new StringBuilder();
                    if (Constants.APPEND_DATA_NAME_SMS_VERIFY_CODE.equals(name)) {
                        promptBuilder.append("请输入短信验证码");
                        appendData.put("statu", Constants.Status_SmsCode);

                    } else if (Constants.APPEND_DATA_NAME_IMAGE_VERIFY_CODE.equals(name)) {
                        System.out.println("图片信息: " + e.get("imageUrl"));
                        promptBuilder.append("请输入图片验证码(复制上方图片信息，粘贴到浏览器地址栏中查看)");
                        appendData.put("statu", Constants.Status_imageCode);
                        appendData.put("imageUrl", e.get("imageUrl").toString());
                    }
                    appendData.put("name", name);
                    appendData.put("crawlerId", crawlerId);
                    appendData.put("crawlerToken", crawlerToken);

                });
                return Response.success(JsonUtil.toJson(appendData));
            } else {
                // BUGFIX: placeholder was missing here as well.
                log.error("异常，获取爬虫信息失败：{}", map.get("message"));
                return Response.fail("获取信息错误");
            }
        }
    }
}
