package com.lichao.FinkDataProcess.etl;

import com.lichao.FinkDataProcess.pojo.RuleBuilder;
import com.lichao.FinkDataProcess.pojo.Rules;
import com.lichao.FinkDataProcess.rel.RelNode;
import com.lichao.FinkDataProcess.sink.DynamicJdbcSink;
import com.lichao.FinkDataProcess.sink.DynamicKafkaSink;
import com.lichao.FinkDataProcess.source.CustomizeSource;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.sql.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Entry point for a Flink job that routes incoming records to Kafka topics and/or JDBC
 * tables based on rules loaded from a MySQL {@code rule} table.
 *
 * <p>Each rule may name a Kafka topic and/or a JDBC table; every record matching a rule is
 * emitted to the corresponding side output(s) and forwarded to a dedicated sink.
 */
public class RuleData {
    // Lookup tables keyed by Kafka topic name / JDBC table name.
    private static final Map<String, OutputTag<String>> SIDE_OUTPUT_TAG_TO_TOPIC_MAP = new HashMap<>();
    private static final Map<String, DynamicKafkaSink> KAFKA_SINK_MAP = new HashMap<>();
    private static final Map<String, OutputTag<String>> SIDE_OUTPUT_TAG_TO_JDBC_MAP = new HashMap<>();
    private static final Map<String, DynamicJdbcSink> JDBC_SINK_MAP = new HashMap<>();

    private static final List<Rules> RULES_LIST;

    // Load the rules and build side-output tags and sinks once, at class-load time.
    // NOTE(review): in a distributed deployment this initializer runs in every JVM that
    // loads the class, re-querying the database each time — confirm that is acceptable.
    static {
        RULES_LIST = fetchRulesFromDatabase();
        initializeSideOutputsAndSinks();
    }

    /**
     * Builds one side-output tag and one sink per distinct topic / table referenced by the
     * rules. Rules that share a topic or table overwrite the map entry with an equivalent
     * tag/sink, so each destination ends up with exactly one of each.
     */
    private static void initializeSideOutputsAndSinks() {
        RULES_LIST.forEach(rule -> {
            if (rule.getTopic() != null) {
                OutputTag<String> sideOutputTag = new OutputTag<>(rule.getTopic() + "_side-output", Types.STRING);
                SIDE_OUTPUT_TAG_TO_TOPIC_MAP.put(rule.getTopic(), sideOutputTag);
                KAFKA_SINK_MAP.put(rule.getTopic(), new DynamicKafkaSink(rule.getTopic(), 1));
            }

            if (rule.getTableName() != null) {
                OutputTag<String> sideOutputTag = new OutputTag<>(rule.getTableName() + "_side-output", Types.STRING);
                SIDE_OUTPUT_TAG_TO_JDBC_MAP.put(rule.getTableName(), sideOutputTag);
                JDBC_SINK_MAP.put(rule.getTableName(), new DynamicJdbcSink(rule.getTableName(), 1));
            }
        });
    }

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        DataStreamSource<String> sourceStream = env.addSource(new CustomizeSource());
        // Evaluate every rule against every record; a record may match several rules and
        // therefore be emitted to several side outputs.
        SingleOutputStreamOperator<String> mainStream = sourceStream.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String value, ProcessFunction<String, String>.Context ctx, Collector<String> out) {
                RULES_LIST.forEach(rule -> {
                    if (RuleBuilder.compare(rule, value)) {
                        if (rule.getTopic() != null) {
                            ctx.output(SIDE_OUTPUT_TAG_TO_TOPIC_MAP.get(rule.getTopic()), value);
                        }

                        if (rule.getTableName() != null) {
                            ctx.output(SIDE_OUTPUT_TAG_TO_JDBC_MAP.get(rule.getTableName()), value);
                        }
                    }
                });
            }
        });

        SIDE_OUTPUT_TAG_TO_TOPIC_MAP.forEach((topic, tag) -> {
            DynamicKafkaSink sink = KAFKA_SINK_MAP.get(topic);
            attachSink(mainStream, tag, sink, sink.parallelism);
        });

        SIDE_OUTPUT_TAG_TO_JDBC_MAP.forEach((tableName, tag) -> {
            DynamicJdbcSink sink = JDBC_SINK_MAP.get(tableName);
            attachSink(mainStream, tag, sink, sink.parallelism);
        });

        env.execute("Flink Job with Custom Source and Side Outputs");
    }

    /**
     * Wires one side output to its sink, applying an explicit parallelism when positive.
     * Shared by the Kafka and JDBC wiring, which previously duplicated this logic verbatim.
     *
     * @param mainStream  the processed stream carrying the side outputs
     * @param tag         the side-output tag to attach
     * @param sink        the destination sink
     * @param parallelism sink parallelism; values &lt;= 0 keep the environment default
     */
    private static void attachSink(SingleOutputStreamOperator<String> mainStream,
                                   OutputTag<String> tag,
                                   SinkFunction<String> sink,
                                   int parallelism) {
        DataStream<String> sideOutputStream = mainStream.getSideOutput(tag);
        if (parallelism > 0) {
            sideOutputStream.addSink(sink).setParallelism(parallelism);
        } else {
            sideOutputStream.addSink(sink);
        }
    }

    /**
     * Loads all rules from the {@code rule} table, pre-compiling each rule's
     * {@code arguments} string into a {@link RelNode}.
     *
     * @return the loaded rules (empty list if the table has no rows)
     * @throws RuntimeException if the query fails or a rule's arguments cannot be parsed
     */
    private static List<Rules> fetchRulesFromDatabase() {
        List<Rules> dataList = new ArrayList<>();
        // TODO(review): externalize connection settings (config file / environment);
        // credentials should not be hard-coded in source.
        String jdbcUrl = "jdbc:mysql://localhost:3306/springboottest";
        String username = "root";
        String password = "123456";
        // Explicit column list instead of SELECT * so a schema change cannot silently
        // alter what this code reads.
        String query = "SELECT id, name, topic, table_name, arguments FROM rule";

        try (Connection connection = DriverManager.getConnection(jdbcUrl, username, password);
             PreparedStatement preparedStatement = connection.prepareStatement(query);
             ResultSet resultSet = preparedStatement.executeQuery()) {

            while (resultSet.next()) {
                int id = resultSet.getInt("id");
                String name = resultSet.getString("name");
                String topic = resultSet.getString("topic");
                String tableName = resultSet.getString("table_name");
                String arguments = resultSet.getString("arguments");
                RelNode relNode = RuleBuilder.buildRule(arguments);
                dataList.add(new Rules(id, name, topic, tableName, arguments, relNode));
            }

        } catch (SQLException e) {
            throw new RuntimeException("Failed to fetch rules from database", e);
        } catch (Exception e) {
            // RuleBuilder.buildRule may throw on malformed arguments; name the phase so
            // the failure is distinguishable from a database error.
            throw new RuntimeException("Failed to build rule from fetched arguments", e);
        }
        return dataList;
    }
}
