package com.lichao.FinkDataProcess.etl;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.*;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.Duration;
import java.util.*;

public class KafkaMysqlRuleMatching {

    // Side-output tag for records forwarded to the downstream Kafka sink.
    // The anonymous subclass ({}) is the standard OutputTag idiom: it preserves
    // the generic type argument (String) against erasure at runtime.
    private static final OutputTag<String> DOWNSTREAM_OUTPUT = new OutputTag<String>("downstream-output") {
    };

    /**
     * Job entry point: reads JSON events from Kafka, broadcasts a rule table
     * loaded from MySQL, deduplicates events per (rule, message-id), and writes
     * the surviving events to a downstream Kafka topic via a side output.
     */
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Kafka source: consume raw JSON strings from the "events" topic.
        Properties consumerConfig = new Properties();
        consumerConfig.setProperty("bootstrap.servers", "localhost:9092");
        consumerConfig.setProperty("group.id", "flink-group");

        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>("events", new SimpleStringSchema(), consumerConfig);
        // Tolerate events arriving up to 10 seconds out of order.
        consumer.assignTimestampsAndWatermarks(WatermarkStrategy.forBoundedOutOfOrderness(Duration.ofSeconds(10)));

        DataStream<String> events = env.addSource(consumer);

        // Rule table is loaded once, on the client, at job-submission time.
        Map<String, Set<String>> rules = loadRulesFromMySQL();

        // Broadcast the rule entries to every parallel task of the process function.
        // The double cast is the usual workaround for obtaining a Class<Set<String>>
        // token despite erasure.
        BroadcastStream<Map.Entry<String, Set<String>>> broadcastRules = env.fromCollection(rules.entrySet())
                .broadcast(new MapStateDescriptor<>("rulesBroadcastState", String.class, (Class<Set<String>>) (Class) Set.class));

        // Key by message id, join with the broadcast rules, then sink only the
        // side output (deduplicated events) to Kafka.
        events.keyBy(KafkaMysqlRuleMatching::extractMessageId)
                .connect(broadcastRules)
                .process(new RuleMatchingProcessFunction())
                .getSideOutput(DOWNSTREAM_OUTPUT)
                .addSink(createKafkaSink());

        env.execute("Kafka Mysql Rule Matching Example");
    }

    /**
     * Rule matching with per-key dedup: an event is forwarded to the side output
     * at most once per (ruleId, messageId) pair within a rolling one-hour window.
     */
    public static class RuleMatchingProcessFunction extends KeyedBroadcastProcessFunction<String, String, Map.Entry<String, Set<String>>, String> {

        // Broadcast-state descriptor. Must match (by name and types) the descriptor
        // used in main() when building the BroadcastStream.
        private static final MapStateDescriptor<String, Set<String>> RULES_STATE_DESC =
                new MapStateDescriptor<>("rulesBroadcastState", String.class, (Class<Set<String>>) (Class) Set.class);

        // Keyed state: "ruleId-messageId" -> timestamp (ms since epoch) of last emission.
        private static final MapStateDescriptor<String, Long> RECENT_MESSAGES_DESC =
                new MapStateDescriptor<>("recentMessages", String.class, Long.class);

        // Handle to the keyed dedup state; initialized once in open().
        private transient MapState<String, Long> recentMessagesState;

        @Override
        public void open(Configuration parameters) throws Exception {
            recentMessagesState = getRuntimeContext().getMapState(RECENT_MESSAGES_DESC);
        }

        @Override
        public void processElement(String value, ReadOnlyContext ctx, Collector<String> out) throws Exception {
            long currentTime = System.currentTimeMillis();
            long oneHourAgo = currentTime - Duration.ofHours(1).toMillis();

            // Evict entries older than one hour. Use Iterator.remove() so the state
            // is not mutated while being iterated (removing inside a for-each over
            // entries() risks a ConcurrentModificationException on heap state).
            Iterator<Map.Entry<String, Long>> it = recentMessagesState.iterator();
            while (it.hasNext()) {
                if (it.next().getValue() < oneHourAgo) {
                    it.remove();
                }
            }

            String messageId = extractMessageId(value);

            ReadOnlyBroadcastState<String, Set<String>> broadcastState = ctx.getBroadcastState(RULES_STATE_DESC);

            // Emit only if no rule has already seen this message id within the window.
            boolean shouldEmit = true;
            for (Map.Entry<String, Set<String>> ruleEntry : broadcastState.immutableEntries()) {
                String uniqueKey = ruleEntry.getKey() + "-" + messageId;
                if (recentMessagesState.contains(uniqueKey)) {
                    shouldEmit = false;
                    break;
                }
            }

            if (shouldEmit) {
                // Record the emission time under every rule so the next occurrence
                // of this message id is suppressed for an hour.
                for (Map.Entry<String, Set<String>> ruleEntry : broadcastState.immutableEntries()) {
                    recentMessagesState.put(ruleEntry.getKey() + "-" + messageId, currentTime);
                }
                ctx.output(DOWNSTREAM_OUTPUT, value);
            }
        }

        @Override
        public void processBroadcastElement(Map.Entry<String, Set<String>> value, Context ctx, Collector<String> out) throws Exception {
            // BUG FIX: this method was empty, so the broadcast state was never
            // populated — processElement always saw an empty rule set and the
            // dedup logic never triggered. Store each broadcast rule entry.
            ctx.getBroadcastState(RULES_STATE_DESC).put(value.getKey(), value.getValue());
        }
    }

    /**
     * Loads rule patterns from MySQL, grouped as ruleId -> set of patterns.
     * Runs on the client at job-submission time, before the pipeline starts.
     *
     * @throws IllegalStateException if the rules cannot be read — failing fast is
     *         deliberate: the original code swallowed the exception and started
     *         the job with an empty rule set, silently disabling deduplication.
     */
    private static Map<String, Set<String>> loadRulesFromMySQL() {
        Map<String, Set<String>> rulesMap = new HashMap<>();
        // Statement and ResultSet join the try-with-resources so they are closed
        // promptly instead of relying on Connection.close() to cascade.
        try (Connection conn = DriverManager.getConnection("jdbc:mysql://localhost:3306/test_db", "root", "password");
             PreparedStatement stmt = conn.prepareStatement("SELECT rule_id, pattern FROM rules");
             ResultSet rs = stmt.executeQuery()) {
            while (rs.next()) {
                String ruleId = rs.getString("rule_id");
                String pattern = rs.getString("pattern");
                rulesMap.computeIfAbsent(ruleId, k -> new HashSet<>()).add(pattern);
            }
        } catch (SQLException e) {
            throw new IllegalStateException("Failed to load rules from MySQL", e);
        }
        return rulesMap;
    }

    // Builds the Kafka producer used as the pipeline sink (topic "output-topic").
    private static FlinkKafkaProducer<String> createKafkaSink() {
        Properties sinkConfig = new Properties();
        sinkConfig.setProperty("bootstrap.servers", "localhost:9092");
        return new FlinkKafkaProducer<>("output-topic", new SimpleStringSchema(), sinkConfig);
    }

    /**
     * Extracts the value of the top-level {@code "id"} field from a JSON message
     * using a naive string scan (no JSON parser); assumes the value contains no
     * escaped quotes. Package-private (not private) so it can be unit-tested.
     *
     * @param message raw JSON string containing {@code "id":"..."}
     * @return the id value
     * @throws IllegalArgumentException if the message has no well-formed id field.
     *         (The original code produced bogus indices from indexOf() == -1 and
     *         threw StringIndexOutOfBoundsException; it also failed on messages
     *         where "id" was the last field, because it searched for "\",".)
     */
    static String extractMessageId(String message) {
        int keyIdx = message.indexOf("\"id\":\"");
        if (keyIdx < 0) {
            throw new IllegalArgumentException("message has no \"id\" field: " + message);
        }
        int startIdx = keyIdx + 6;
        // Look for the closing quote rather than "\"," so an id that is the last
        // field in the object is handled too.
        int endIdx = message.indexOf('"', startIdx);
        if (endIdx < 0) {
            throw new IllegalArgumentException("unterminated \"id\" value: " + message);
        }
        return message.substring(startIdx, endIdx);
    }
}