package com.innodealing;

import com.innodealing.config.FlinkConfig;
import com.innodealing.constants.AppConstants;
import com.innodealing.constants.KafkaConstants;
import com.innodealing.factory.KafkaSourceFactory;
import com.innodealing.model.User;
import com.innodealing.model.UserKafkaMessage;
import com.innodealing.process.GlobalStateAggregator;
import com.innodealing.process.UserNameCountFunction;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.*;

public class UserNameCountJob {
    private static final Logger logger = LoggerFactory.getLogger(UserNameCountJob.class);

    /**
     * Entry point: builds and runs the Flink streaming job that counts user names.
     *
     * <p>Pipeline: a one-shot JDBC snapshot of the {@code user} table is connected with a
     * Kafka stream of incremental user events; both are keyed by username, counted per key
     * by {@link UserNameCountFunction}, then funneled to a single key so
     * {@link GlobalStateAggregator} can maintain a global view, which is printed to stdout.
     *
     * <p>Required CLI parameters (see {@link AppConstants.ParameterKeys} for exact keys):
     * the MySQL URL, username, and password. These are intentionally NOT defaulted —
     * shipping credentials inside source code is a secret leak, so the job fails fast
     * with a descriptive error when they are missing.
     *
     * @param args CLI arguments parsed by {@link ParameterTool#fromArgs(String[])}
     * @throws Exception if the job fails to build or execute
     */
    public static void main(String[] args) throws Exception {

        // 1. Create the Flink execution environment from CLI parameters.
        final ParameterTool params = ParameterTool.fromArgs(args);
        final StreamExecutionEnvironment env = FlinkConfig.createStreamExecutionEnvironment(params);

        // 2. Read connection settings. getRequired(...) throws with a clear message when a
        //    key is absent, instead of falling back to credentials hardcoded in the source.
        final String kafkaBootstrapServers = FlinkConfig.getKafkaBootstrapServers(params);
        final String mysqlUrl = params.getRequired(AppConstants.ParameterKeys.MYSQL_URL);
        final String mysqlUsername = params.getRequired(AppConstants.ParameterKeys.MYSQL_USERNAME);
        final String mysqlPassword = params.getRequired(AppConstants.ParameterKeys.MYSQL_PASSWORD);

        // 3. Bounded snapshot source: reads every row of the user table once at startup.
        //    Parallelism 1 so a single JDBC connection scans the table exactly once.
        DatabaseUserSource dbSource = new DatabaseUserSource(mysqlUrl, mysqlUsername, mysqlPassword);
        DataStream<User> userSnapshotStream = env.addSource(dbSource, "Database User Snapshot Source")
                .setParallelism(1);

        // 4. Unbounded Kafka source: listens for user events arriving after the snapshot.
        KafkaSource<UserKafkaMessage> userKafkaSource = KafkaSourceFactory.createUserEventsSource(kafkaBootstrapServers);
        DataStream<UserKafkaMessage> userKafkaStream = env.fromSource(
                userKafkaSource,
                WatermarkStrategy.noWatermarks(),
                KafkaConstants.SourceNames.USER_EVENTS_SOURCE
        );

        // 5. Connect both streams keyed by username and maintain a per-name count.
        DataStream<Tuple2<String, Long>> distributedState = userSnapshotStream
                .connect(userKafkaStream)
                .keyBy(User::getUsername, UserKafkaMessage::getUsername)
                .process(new UserNameCountFunction());

        // Route every per-name count to the same key so one operator instance sees them
        // all. NOTE(review): this deliberately serializes the aggregation step onto a
        // single subtask — a throughput bottleneck accepted for a consistent global view.
        DataStream<String> globalStateStream = distributedState
                .keyBy(t -> 1)
                .map(new GlobalStateAggregator());

        // 6. Print results.
        globalStateStream.print("Username Count Result");

        // 7. Execute the job graph.
        env.execute("User Name Count Job");
    }

    /**
     * One-shot JDBC source that emits every row of the {@code user} table as a
     * {@link User} and then finishes. Intended to run with parallelism 1.
     */
    public static class DatabaseUserSource extends RichSourceFunction<User> {
        private final String jdbcUrl;
        private final String username;
        private final String password;
        // Cooperative cancellation flag checked between rows; volatile because cancel()
        // is invoked from a different thread than run().
        private volatile boolean isRunning = true;

        /**
         * @param jdbcUrl  JDBC connection URL for the MySQL database
         * @param username database user
         * @param password database password
         */
        public DatabaseUserSource(String jdbcUrl, String username, String password) {
            this.jdbcUrl = jdbcUrl;
            this.username = username;
            this.password = password;
        }

        @Override
        public void open(Configuration parameters) throws Exception {
            super.open(parameters);
            // Defensive explicit driver load: JDBC 4+ normally self-registers drivers via
            // SPI, but shaded/relocated deployments can break automatic discovery.
            Class.forName("com.mysql.cj.jdbc.Driver");
        }

        /**
         * Reads the full user table and emits one {@link User} per row, then returns,
         * ending the (bounded) source.
         *
         * @throws RuntimeException wrapping any {@link SQLException} from the scan
         */
        @Override
        public void run(SourceContext<User> ctx) throws Exception {
            try (Connection connection = DriverManager.getConnection(jdbcUrl, username, password)) {
                logger.info("连接数据库并读取用户数据");
                String sql = "SELECT id, username, email, age, create_time, update_time FROM user";
                try (PreparedStatement statement = connection.prepareStatement(sql);
                     ResultSet resultSet = statement.executeQuery()) {

                    // Check isRunning BEFORE advancing the cursor, so cancellation does
                    // not fetch (and silently drop) one extra row.
                    while (isRunning && resultSet.next()) {
                        User user = mapRow(resultSet);
                        // Emit under the checkpoint lock, as the SourceFunction contract
                        // requires for records to be consistent with checkpoints.
                        synchronized (ctx.getCheckpointLock()) {
                            ctx.collect(user);
                        }
                    }
                }
            } catch (SQLException e) {
                throw new RuntimeException("Failed to read from database", e);
            }
        }

        /** Maps the current {@link ResultSet} row onto a new {@link User} bean. */
        private static User mapRow(ResultSet resultSet) throws SQLException {
            User user = new User();
            user.setId(resultSet.getInt("id"));
            user.setUsername(resultSet.getString("username"));
            user.setEmail(resultSet.getString("email"));
            user.setAge(resultSet.getInt("age"));
            user.setCreateTime(resultSet.getTimestamp("create_time"));
            user.setUpdateTime(resultSet.getTimestamp("update_time"));
            return user;
        }

        @Override
        public void cancel() {
            isRunning = false;
        }
    }
}
