package cn.touna.jss.common.mq;

import cn.touna.jss.common.entity.CreditQueryRequest;
import cn.touna.jss.common.enums.IntfScopeEnum;
import cn.touna.jss.common.properties.config.HbaseTableTopicProperties;
import cn.touna.jss.common.util.DateUtils;
import cn.touna.jss.common.util.JssConstant;
import cn.touna.jss.modules.adapter.ApiFactory;
import cn.touna.jss.modules.adapter.ApiUtils;
import cn.touna.jss.modules.credit.entity.CreditQueryHistoryEntity;
import cn.touna.jss.modules.credit.entity.CreditRecordRef;
import cn.touna.risk.api.domain.Response;
import cn.touna.risk.api.utils.IntfTypeUtil;
import cn.touna.risk.api.utils.MD5Util;
import cn.touna.risk.cache.redis.RedisCache;
import cn.touna.risk.mq.common.RiskKafkaProducer;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.touna.loan.log.service.TracerContextUtil;
import org.apache.commons.collections.map.HashedMap;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static cn.touna.risk.api.enums.SystemErrorCodeEnum.CRS_DATA_REPORT_SUCC;


/**
 * Publishes JSS credit-query events to Kafka: fraud-profile results, raw call
 * history for offline structuring, HBase row keys, tracking-point (埋点) data,
 * and the API-definition catalog consumed by the OPS system.
 *
 * <p>All sends are asynchronous via {@link RiskKafkaProducer}; topic names come
 * from {@link HbaseTableTopicProperties}.
 *
 * Created by wuwc on 2017/9/2.
 */
@Component
public class KafkaComponent {

    private static final Logger logger = LogManager.getLogger(KafkaComponent.class);

    @Autowired
    private RiskKafkaProducer riskKafkaProducer;

    @Autowired
    private HbaseTableTopicProperties topicProperties;

    @Autowired
    private RedisCache redisCache;


    /**
     * Sends the synchronous-call result of a credit query to the fraud-profile topic.
     * Failures are logged (with stack trace) and swallowed — publishing is best-effort
     * and must not break the calling query flow.
     *
     * @param request  the credit query that was executed; its crNo is used as the Kafka key
     * @param response the result returned to the caller
     */
    public void sendSyncCallData(CreditQueryRequest request, Response response) {
        try {
            String data = KafkaUtils.assembleKafkaMsg(request, response);
            logger.info("【反欺诈推送结果】,topic:{},method:{},result:{}", topicProperties.getProfileTopic(), request.getMethod(), data);
            riskKafkaProducer.sendAsyncMessage(request.getCrNo(), topicProperties.getProfileTopic(), data);
        } catch (Exception e) {
            // Pass the throwable as the trailing argument so log4j2 records the stack
            // trace; the original logged only e.getMessage() and lost it.
            logger.error("【反欺诈推送异常】,topic:{},method：{},msg：{}", topicProperties.getProfileTopic(), request.getMethod(), e.getMessage(), e);
        }
    }

    /**
     * Sends the raw call record to the credit-history topic, consumed by the
     * scheduled third-party structuring job. Best-effort: failures are logged
     * and swallowed.
     *
     * @param request  the credit query; its crNo is used as the Kafka key
     * @param response the third-party call result
     */
    public void sendQueryHistory(CreditQueryRequest request, Response response) {
        try {
            String result = KafkaUtils.assmbleQueryResult(request, response);
            riskKafkaProducer.sendAsyncMessage(request.getCrNo(), topicProperties.getCreditHistoryTopic(), result);
            logger.info("【调用记录成功】,topic:{},result:{}", topicProperties.getCreditHistoryTopic(), result);
        } catch (Exception e) {
            // Original swallowed the exception without recording it; include it so
            // the failure cause is diagnosable.
            logger.error("【调用记录异常】,topic:{},method:{}", topicProperties.getCreditHistoryTopic(), request.getMethod(), e);
        }
    }

    /**
     * Sends the HBase row key of a stored query result to the row-key topic.
     *
     * @param recordRef reference (crNo, method, row key) of the persisted result
     */
    public void sendQueryResult(CreditRecordRef recordRef) {
        logger.info("【推送查询结果】,topic:{},method:{}", topicProperties.getRowKeyTopic(), recordRef.getMethodName());
        riskKafkaProducer.sendAsyncMessage(recordRef.getCrNo(), topicProperties.getRowKeyTopic(), JSON.toJSONString(recordRef));
    }

    /**
     * Sends tracking-point (埋点) report data, keyed by the current trace id.
     *
     * @param msg the report payload to publish
     * @return a success {@link Response} carrying the trace id
     */
    public Response sendDataReportMsg(String msg) {
        Response response = new Response();
        String traceId = TracerContextUtil.getTraceId();
        riskKafkaProducer.sendAsyncMessage(traceId, topicProperties.getDataTopic(), msg);
        logger.info("【埋点上报推送结束】topic:{}", topicProperties.getDataTopic());
        return response.success(CRS_DATA_REPORT_SUCC, traceId);
    }

    /*
     * @Author wuwc
     * @Description Pushes the ApiMapping definitions to the OPS system. Test-scope
     *              interfaces are excluded. The list is only published when its MD5
     *              differs from the one cached in Redis (i.e. the definitions changed).
     * @Date 15:35 2018/12/10
     * @Param [map] api name -> runnable mapping; null/empty maps are ignored
     * @return void
     **/
    public void sendApiMessage(Map<String, ApiFactory.ApiRunnable> map) {
        if (map == null || map.isEmpty()) {
            return;
        }
        List<Map<String, String>> msg = new ArrayList<>(map.size());
        for (Map.Entry<String, ApiFactory.ApiRunnable> entry : map.entrySet()) {
            ApiFactory.ApiRunnable api = entry.getValue();
            try {
                String scopeCode = api.getScope().getCode();
                // BUG FIX: the original compared the enum constant itself to the String
                // code (IntfScopeEnum.JSS_INTF_TEST.equals(scopeCode)), which is always
                // false, so test-scope interfaces were never filtered out. Compare
                // code-to-code instead.
                // NOTE(review): assumes IntfScopeEnum exposes getCode() like the scope
                // returned by getScope() — confirm against the enum definition.
                if (!IntfScopeEnum.JSS_INTF_TEST.getCode().equals(scopeCode)) {
                    // Split name/desc once instead of once per field.
                    String[] nameParts = IntfTypeUtil.splitIntfType(api.getApiName());
                    String apiDesc = api.getApiDesc();
                    String[] descParts = StringUtils.isEmpty(apiDesc) ? null : IntfTypeUtil.splitIntfType(apiDesc);
                    Map<String, String> m = new HashMap<>();
                    m.put("serviceName", nameParts[0]);
                    m.put("methodName", nameParts[1]);
                    m.put("serviceDesc", descParts == null ? null : descParts[0]);
                    m.put("methodDesc", descParts == null ? null : descParts[1]);
                    m.put("version", api.getVersion());
                    m.put("scope", scopeCode);
                    msg.add(m);
                }
            } catch (Exception e) {
                // A malformed apiName/apiDesc (wrong separator format) skips only
                // that entry; the rest of the catalog is still published.
                logger.error(api.getApiName() + "，" + api.getApiDesc() + "：格式书写错误", e);
            }
        }
        // Serialize once; the original called JSON.toJSONString(msg) three times.
        String msgJson = JSON.toJSONString(msg);
        String apiMsgMd5 = MD5Util.MD5(msgJson);
        String apiCacheMd5 = (String) redisCache.get(JssConstant.REDIS_KEY_INTF_SCOPE);
        logger.info("接口定义新MD5值：{}，旧MD5：{}", apiMsgMd5, apiCacheMd5);
        //比较每次接口配置的MD5值，值有更新则同步到OPS更新，没有则不同步
        if (StringUtils.isBlank(apiCacheMd5) || !StringUtils.equals(apiMsgMd5, apiCacheMd5)) {
            riskKafkaProducer.sendAsyncMessage(topicProperties.getApiTopic(), msgJson);
            logger.info("【推送API定义结果】,topic:{},msg:{}", topicProperties.getApiTopic(), msgJson);
            redisCache.set(JssConstant.REDIS_KEY_INTF_SCOPE, apiMsgMd5);
        }
    }
}
