package com.xzj.admin.resource.analysis.handle.analysis.pg;

import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONArray;
import com.alibaba.fastjson2.JSONObject;
import com.xzj.admin.analysis.base.common.cache.SubResourceCache;
import com.xzj.admin.analysis.base.common.cache.WaitDelCache;
import com.xzj.admin.analysis.base.common.handle.CommonAbstractAnalysisHandle;
import com.xzj.admin.analysis.base.constants.CommonEnums;
import com.xzj.admin.analysis.base.domain.CommonFlowInfo;
import com.xzj.admin.analysis.base.util.AnalysisCommonUtil;
import com.xzj.admin.resource.analysis.cache.BaseCache;
import com.xzj.admin.resource.analysis.domain.entity.ResMessageSource;
import com.xzj.admin.resource.analysis.domain.props.BaseProps;
import com.xzj.admin.resource.analysis.domain.props.CloudProps;
import com.xzj.admin.resource.analysis.handle.async.AsyncRowHandle;
import com.xzj.admin.resource.analysis.handle.redis.CustomRedisHandle;
import com.xzj.common.plugins.disruptor.util.DisruptorUtil;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.stereotype.Component;

import java.util.*;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Consumer;

/**
 * Parses resource messages consumed from Kafka: splits each message into
 * per-theme payloads, fans the rows out to asynchronous row handling, batches
 * the results into PG, and dispatches the terminal "pushEnd" notification
 * through a disruptor queue.
 *
 * @author XiaZunJun
 * @since 2024-11-04
 */
@Component
@Slf4j
public class MsgAnalysisHandle {

    @Resource
    private BaseProps baseProps;

    @Resource
    private MsgAnalysisEndHandle msgAnalysisEndHandle;

    @Resource
    private AsyncRowHandle asyncRowHandle;

    @Resource
    private MsgBatchToPg msgBatchToPg;

    @Resource
    private CustomRedisHandle<String> customRedisHandle;

    @Resource
    private CommonAbstractAnalysisHandle commonAbstractAnalysisHandle;

    /**
     * Queue that carries the final "pushEnd" message to {@link MsgAnalysisEndHandle}.
     * A lambda (not a bound method reference) is mandatory here: this field
     * initializer runs before {@code msgAnalysisEndHandle} is injected, so the
     * target bean must be read lazily when a message is consumed — a bound
     * reference ({@code msgAnalysisEndHandle::analysisEndHandle}) would capture
     * the still-null field at construction time.
     */
    private final DisruptorUtil<CommonFlowInfo> disruptorUtil =
            new DisruptorUtil<>("middleAfter", info -> msgAnalysisEndHandle.analysisEndHandle(info));

    /**
     * Serializes pulling the redis-cached MD5 map into the local
     * {@link WaitDelCache}, so only one thread populates it per theme/pool.
     */
    private static final Lock REDIS_GET_LOCK = new ReentrantLock();

    /**
     * Parses one Kafka record containing resource data.
     * <p>Skips records without a value and records missing pool/province info;
     * short-circuits "pushEnd" markers to the disruptor queue. All other
     * payloads are handed to {@link #foreachJson(JSONObject, CommonFlowInfo)}.
     *
     * @param record kafka record whose value is a JSON document
     * @author xiazunjun
     * @date 2023/8/15 17:12
     */
    public void analysisKafkaMsg(ConsumerRecord<String, String> record) {
        try {
            Optional<String> message = Optional.ofNullable(record.value());
            // Skip tombstone / empty records.
            if (message.isPresent()) {
                String msg = message.get();
                JSONObject msgJson = JSONObject.parseObject(msg);
                CloudProps cloudProps = baseProps.getCloud();
                commonAbstractAnalysisHandle.analysisBefore(msgJson);
                CommonFlowInfo info =
                        CommonFlowInfo.create(msgJson, record.timestamp(), cloudProps.getId(),
                                cloudProps.getType());
                log.info("kafka主题:{}-分区:{}-接收到时间:{},资源池:{},省份:{},资源消息:{}", record.topic(),
                        record.partition(), info.getDateStr(), info.getRmPoolId(), info.getStandardCode(),
                        JSON.toJSONString(msgJson.keySet()));
                if (StrUtil.isEmpty(info.getRmPoolId()) || StrUtil.isEmpty(info.getStandardCode())) {
                    // FIX: the previous JSON.toJSONString(keySet, msg) call hit the
                    // toJSONString(Object, String format) overload, so the raw message
                    // was consumed as a date-format pattern. Serialize the key set alone
                    // (the template has exactly four placeholders).
                    log.error("kafka主题:{}-分区:{}-时间:{}-原始数据,当前资源{}无资源池和省份信息，跳过同步",
                            record.topic(), record.partition(), info.getDateStr(),
                            JSON.toJSONString(msgJson.keySet()));
                    return;
                }
                // Terminal marker: trigger the CMDB sync instead of row analysis.
                if (msgJson.containsKey("pushEnd")) {
                    log.info("走入最终推送消息");
                    disruptorUtil.sendMessage(info);
                    return;
                }
                // Row-tag callback used downstream to classify each row as insert/update.
                info.setJudgeRowTagFun(msgBatchToPg::judgeRowTag);
                foreachJson(msgJson, info);
            }
        } catch (Exception e) {
            log.error("analysisKafkaMsg-消息解析异常-{}", JSON.toJSONString(record), e);
        }
    }

    /**
     * Iterates the first-level keys of the message and analyses every key that
     * is a registered message source; unknown keys (other than the EXTEND
     * field) are logged and skipped.
     *
     * @param msgJson raw kafka payload
     * @param info    shared flow context carried through the whole pipeline
     * @author xiazunjun
     * @date 2023/8/15 17:21
     */
    private void foreachJson(JSONObject msgJson, CommonFlowInfo info) {
        // First-level tags of the message.
        Set<String> sourceThemes = msgJson.keySet();
        sourceThemes.forEach(key -> {
            // Only analyse themes that are registered message sources.
            if (BaseCache.messageSourceContains(key)) {
                info.putCacheKeyPrefix(key);
                JSONObject sourceJson = msgJson.getJSONObject(key);
                if (null == sourceJson) {
                    // The value is an array, not an object — wrap it so downstream
                    // code can always address it by the theme key.
                    sourceJson = new JSONObject();
                    sourceJson.put(key, msgJson.getJSONArray(key));
                }
                themeToAnalysis(sourceJson, info);
            } else if (!CommonEnums.JSON_FIELD.EXTEND.judgeEquals(key)) {
                log.info("{}-{}当前资源不在同步范围，跳过", info.getDateStr(), key);
            }
        });
    }

    /**
     * Runs the full analysis pipeline for one message-source theme: pulls the
     * redis MD5 cache locally, builds the row objects, analyses them
     * asynchronously (awaiting completion via a latch), flushes the batch to
     * PG, and finally recurses into any cached sub-resources.
     *
     * @param sourceJson theme payload
     * @param info       shared flow context
     * @author xiazunjun
     * @date 2023/8/15 17:19
     */
    private void themeToAnalysis(JSONObject sourceJson, CommonFlowInfo info) {
        try {
            // Pull the redis-cached MD5 set into the local cache first.
            ctgToLocalCache(info);
            // NOTE(review): assumes the theme is always registered, since callers
            // filter via BaseCache.messageSourceContains; a null source here would
            // NPE below — confirm getMessageSource cannot return null for contained keys.
            ResMessageSource source = BaseCache.getMessageSource(info.getSourceTheme());
            // Build the per-row analysis objects for this theme.
            List<JSONObject> sourceObjects = buildFlowAnalysisJson(sourceJson, source);
            if (!sourceObjects.isEmpty()) {
                CountDownLatch latch = new CountDownLatch(sourceObjects.size());
                info.setDownLatch(latch);
                // Fan every row out to the async handler; each row counts the latch down.
                sourceObjects.forEach(sourceObject ->
                        asyncRowHandle.foreachRow(info, sourceObject, source));
                latch.await();
                Map<String, String> md5Map = WaitDelCache.getMd5Map(info.getSourceTheme(), info.getRmPoolId());
                msgBatchToPg.batchToPg(info, md5Map);
            } else {
                log.info("{}当前资源无数据，跳过", info.getDateStr());
            }
            // Recurse into sub-resources collected by the row handlers, if any.
            String subKey = AnalysisCommonUtil.commonBuildStr(info.getCacheKeyPrefix(), info.getSourceTheme(), info.getThreadName());
            JSONObject subObject = SubResourceCache.getSubAnalysisObject(subKey);
            if (ObjectUtil.isNotEmpty(subObject)) {
                foreachJson(subObject, info);
                // After the sub-resources are processed, restore the main resource theme.
                if (StrUtil.isEmpty(source.getParentId())) {
                    info.setSourceTheme(source.getSourceTheme());
                }
            }
        } catch (InterruptedException e) {
            // FIX: restore the interrupt flag so the consumer thread can observe
            // shutdown; previously the interruption was swallowed.
            Thread.currentThread().interrupt();
            log.error("themeToAnalysis-{}-{} 根据消息源主题进行相关处理异常", info.getDateStr(), info.getSourceTheme(),
                    e);
        }
    }

    /**
     * Builds the list of per-row JSON objects to analyse for a message source.
     * <ul>
     *   <li>No main tag configured: the whole payload is one analysis unit.</li>
     *   <li>Main tag only: the tag's array (or single object) becomes the rows.</li>
     *   <li>Main tag + peer tags: sibling peer fields are merged into every row,
     *       parsed as object/array/scalar depending on their leading character.</li>
     * </ul>
     *
     * @param sourceJson theme payload
     * @param source     message-source configuration (main tag, peer tags)
     * @return row objects to analyse, never {@code null}
     */
    public List<JSONObject> buildFlowAnalysisJson(JSONObject sourceJson, ResMessageSource source) {
        // No main tag configured: the whole payload is a single analysis unit.
        if (StrUtil.isEmpty(source.getSourceMainTag())) {
            List<JSONObject> sourceObjects = new ArrayList<>();
            sourceObjects.add(sourceJson);
            return sourceObjects;
        }
        List<JSONObject> sourceObjects = extractMainTagObjects(sourceJson, source.getSourceMainTag());
        if (StrUtil.isNotEmpty(source.getSourcePeerTag())) {
            // Peer tags are sibling fields that must be merged into every row.
            // Split once — it is loop-invariant.
            String[] otherTags = source.getSourcePeerTag().split(",");
            for (JSONObject sourceObject : sourceObjects) {
                for (String otherTag : otherTags) {
                    String subStr = sourceJson.getString(otherTag);
                    if (null == subStr) {
                        continue;
                    }
                    // Re-parse the peer value by shape: object, array, or plain scalar.
                    if (subStr.startsWith("{")) {
                        sourceObject.put(otherTag, JSONObject.parseObject(subStr));
                    } else if (subStr.startsWith("[")) {
                        sourceObject.put(otherTag, JSONArray.parseArray(subStr));
                    } else {
                        sourceObject.put(otherTag, subStr);
                    }
                }
            }
        }
        return sourceObjects;
    }

    /**
     * Reads the main tag's value as a list of row objects, accepting either a
     * JSON array of rows or a single row object.
     *
     * @param sourceJson theme payload
     * @param mainTag    configured main tag name
     * @return mutable list of row objects
     */
    private List<JSONObject> extractMainTagObjects(JSONObject sourceJson, String mainTag) {
        List<JSONObject> objects = new ArrayList<>();
        if (sourceJson.get(mainTag) instanceof JSONArray) {
            JSONArray foreachArray = sourceJson.getJSONArray(mainTag);
            for (int i = 0; i < foreachArray.size(); i++) {
                objects.add(foreachArray.getJSONObject(i));
            }
        } else {
            objects.add(sourceJson.getJSONObject(mainTag));
        }
        return objects;
    }

    /**
     * Pulls the ctg-cache (redis) MD5 map for this theme/pool into the local
     * {@link WaitDelCache}, exactly once per theme/pool, guarded by
     * {@link #REDIS_GET_LOCK} so concurrent themes do not double-fetch.
     *
     * @param info shared flow context
     * @author xiazunjun
     * @date 2023/8/18 9:44
     */
    private void ctgToLocalCache(CommonFlowInfo info) {
        REDIS_GET_LOCK.lock();
        try {
            // Already cached locally — nothing to do.
            if (WaitDelCache.existKey(info.getSourceTheme(), info.getRmPoolId())) {
                return;
            }
            // Redis hash key for this theme's MD5 set.
            String md5Key = AnalysisCommonUtil.commonBuildStr(info.getCacheKeyPrefix(), info.getSourceTheme(), CommonEnums.CACHE_KEY.MD5.name());
            Map<String, String> cacheMd5Map = customRedisHandle.getCacheMap(md5Key);
            // Remember the originating redis key alongside the entries.
            cacheMd5Map.put(CommonEnums.CACHE_KEY.PREFIX.getValue(), md5Key);
            WaitDelCache.putMap(info.getSourceTheme(), info.getRmPoolId(), cacheMd5Map);
        } finally {
            REDIS_GET_LOCK.unlock();
        }
    }

}
