package com.ruoyi.logCen.dKafka;

import com.alibaba.fastjson.JSONObject;
import com.ruoyi.logCen.controller.HandleKafkaLogMsg;
import com.ruoyi.logCen.domain.LogInfo;
import com.ruoyi.logCen.domain.LogRule;
import com.ruoyi.logCen.domain.LogTable;
import com.ruoyi.logCen.domain.LogTableColumn;
import com.ruoyi.logCen.service.*;
import com.ruoyi.logCen.util.oConvertUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.StringJoiner;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

@Slf4j
@Service
public class KafkaMessageHandlerService {

    @Resource
    private LogInfoService logInfoService;

    @Resource
    private LogRuleService logRuleService;

    @Resource
    private LogTableService logTableService;

    @Resource
    private LogTableColumnService logTableColumnService;

    @Resource
    private GeneralService generalService;

    @Resource
    private DorisService dorisService;

    public void handleMessage(List<ConsumerRecord<String, Object>> records) {
        log.info("kafka收到消息records:{}", records);
        for (ConsumerRecord<String, Object> record : records) {
            // 打印消息的内容和元数据
//            System.out.println("Received Message: " + record.value());
//            System.out.println("Topic: " + record.topic());
//            System.out.println("Partition: " + record.partition());
//            System.out.println("Offset: " + record.offset());
            String recordValue = (String) record.value();
            log.info("kafka消息转换为recordValue:{}", recordValue);
            parse(recordValue.toString());
        }
    }
    /**
     * @param recordValue 业务数据
     */
    private void parse(String recordValue) {
        log.info("进入parseAlarmRules()方法,recordValue:" + recordValue);
        //将收到的业务数据转换成JSON对象
        JSONObject valueObject = JSONObject.parseObject(recordValue);
        log.info("将接收到的数据转换为valueObject");

        //具体业务逻辑处理
        LogInfo logInfo = new LogInfo(valueObject.get("ip").toString(), valueObject.get("logPath").toString());
        logInfo = logInfoService.selectOne(logInfo);

        LogRule logRule = new LogRule(logInfo.getLogType());
        logRule = logRuleService.selectOne(logRule);

        LogTable logTable = new LogTable(logRule.getTableId());
        logTable = logTableService.selectOne(logTable);

        LogTableColumn logTableColumn = new LogTableColumn(logTable.getId());
        List<LogTableColumn> logTableColumnList = logTableColumnService.getTableColumnList(logTableColumn);

        int consumeLogId = getNextSeq("consume_kafka_data");
        Map<String, Object> consumeKafkaLog = new HashMap<>();
        consumeKafkaLog.put("id", consumeLogId);
        consumeKafkaLog.put("logInfoId", logInfo.getId());
        consumeKafkaLog.put("message", valueObject.getString("logContent"));
        dorisService.insertConsumeKafkaLog(consumeKafkaLog);


        int analysisId = getNextSeq("analysis_log_kafka");
        StringBuilder insertSql = new StringBuilder();
        insertSql.append("insert into ").append(logTable.getTableName()).append("(");
        for (int i=0; i<logTableColumnList.size(); i++) {
            LogTableColumn logTableColumn1 = logTableColumnList.get(i);
            insertSql.append(logTableColumn1.getLogColumnName());

            if(i!=logTableColumnList.size()-1){
                insertSql.append(", ");
            }

        }




        insertSql.append(") values (")
                .append(analysisId).append(", ")
                .append(logInfo.getId()).append(", ")
                .append(consumeLogId).append(", ");

        String regex = logRule.getRegularExpression();

        String logLine = valueObject.getString("logContent");

        // 定义正则表达式
//        String regex = "^\\[(\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3})\\] (\\w+) \\[(.*?)\\] (.*)$";
        Pattern pattern = Pattern.compile(regex);
        Matcher matcher = pattern.matcher(logLine);

        // 匹配并提取内容
        if (matcher.find()) {
            String timestamp = matcher.group(1); // 时间
            String logLevel = matcher.group(2);  // 日志级别
            String logMessage = matcher.group(3); // 日志信息

            System.out.println("时间: " + timestamp);
            System.out.println("日志级别: " + logLevel);
            System.out.println("日志信息: " + logMessage);

            for (int i=3; i<logTableColumnList.size(); i++){
                insertSql.append("'");
                insertSql.append(matcher.group(i-2));
                insertSql.append("'");
                if(i!=logTableColumnList.size()-1){
                    insertSql.append(", ");
                }
            }

        } else {
            System.out.println("未匹配到日志格式");
        }

        insertSql.append(")");
        log.info("insertSql:" + insertSql.toString());

        dorisService.insertAnalysisLog(insertSql.toString());

    }

    public int getNextSeq(String seqName){
        Map<String, Object> nextSeq = generalService.getNextSeq(seqName);
        return oConvertUtils.isEmpty(nextSeq)?0:Integer.parseInt(nextSeq.get("currentVal").toString());
    }

}
