package com.zf.flink.simple;

import com.alibaba.fastjson.JSONObject;
import com.zf.flink.User;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.logging.Level;
import java.util.logging.Logger;

@Component
public class UserSyncJob {
    @Autowired
    private MysqlCdcSource mysqlCdcSource;

    /**
     * Builds and runs the Flink streaming job that syncs MySQL CDC change
     * events into Redis: source -> JSON parse -> null filter -> Redis hash sink.
     *
     * @throws Exception if job construction or execution fails
     */
    public void execute() throws Exception {
        final StreamExecutionEnvironment environment =
                StreamExecutionEnvironment.getExecutionEnvironment();

        // Checkpoint every 60s with exactly-once semantics so CDC offsets
        // are recoverable after a failure.
        environment.enableCheckpointing(60000);
        environment.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);

        // 1. Source: raw CDC change records as JSON strings.
        DataStream<String> rawChanges = mysqlCdcSource.createSource(environment);

        // 2. Parse each record into a User; records that fail to parse map
        //    to null and are dropped here.
        DataStream<User> users = rawChanges
                .map(new UserDeserializationMapper())
                .filter(user -> user != null);

        // 3. Sink: cache every user in a Redis hash keyed by user id.
        //    NOTE(review): host/port are hard-coded — consider externalizing.
        FlinkJedisPoolConfig jedisConfig = new FlinkJedisPoolConfig.Builder()
                .setHost("localhost")
                .setPort(6379)
                .build();
        users.addSink(new RedisSink<>(jedisConfig, new UserRedisMapper()))
                .name("Redis Cache Sink");

        // 4. TODO: wire up the idempotent MySQL sink (JdbcSink below) once verified.

        environment.execute("User Data Sync Job");
    }

    /**
     * Deserializes a CDC change-event JSON string into a {@link User}.
     *
     * <p>Reads the {@code "after"} image of the event. Returns {@code null}
     * when the record has no {@code "after"} object or cannot be parsed, so
     * the downstream {@code filter(user -> user != null)} discards it — a
     * single malformed record must never fail the whole job.
     */
    public static class UserDeserializationMapper implements MapFunction<String, User> {
        // static: java.util.logging.Logger is not Serializable, and Flink
        // serializes MapFunction instances when distributing them.
        private static final Logger LOG =
                Logger.getLogger(UserDeserializationMapper.class.getName());

        @Override
        public User map(String value) {
            if (value == null || value.isEmpty()) {
                return null; // nothing to parse; filtered out downstream
            }
            try {
                JSONObject json = JSONObject.parseObject(value);
                JSONObject after = json.getJSONObject("after");
                if (after != null) {
                    User user = new User();
                    user.setId(after.getLong("id"));
                    user.setName(after.getString("name"));
                    user.setAge(after.getInteger("age"));
                    // NOTE(review): "email" appears in the CDC payload but is
                    // not mapped — confirm User exposes it before enabling.
//                    user.setEmail(after.getString("email"));
                    return user;
                }
            } catch (Exception e) {
                // Log-and-skip instead of printStackTrace(): keep a record of
                // the failure without crashing or writing to raw stderr.
                LOG.log(Level.WARNING, "Failed to deserialize CDC record, skipping", e);
            }
            return null;
        }
    }

    /**
     * Maps a {@link User} onto the Redis hash {@code user:info}: the hash
     * field is the user's id, the hash value is the user serialized as JSON.
     */
    public static class UserRedisMapper implements RedisMapper<User> {

        @Override
        public RedisCommandDescription getCommandDescription() {
            // HSET into one well-known hash that holds all users.
            return new RedisCommandDescription(RedisCommand.HSET, "user:info");
        }

        @Override
        public String getKeyFromData(User user) {
            return user.getId().toString();
        }

        @Override
        public String getValueFromData(User user) {
            return JSONObject.toJSONString(user);
        }
    }

    /**
     * Idempotent MySQL sink: upserts each {@link User} via
     * {@code INSERT ... ON DUPLICATE KEY UPDATE}.
     *
     * <p>Bug fix: the previous SQL declared placeholders for an {@code email}
     * column whose bindings (parameters 4 and 7) were commented out, so every
     * {@code executeUpdate()} failed with "No value specified for parameter 4".
     * The SQL now matches the bound parameters exactly; when {@code User}
     * gains an email getter, re-add the column and its bindings together.
     */
    public static class JdbcSink extends RichSinkFunction<User> {
        private Connection connection;
        private PreparedStatement statement;

        @Override
        public void open(Configuration parameters) throws Exception {
            super.open(parameters);
            // One connection per sink subtask; use a pooled DataSource in
            // production.
            connection = DataSourceUtil.getConnection();
            statement = connection.prepareStatement(
                    "INSERT INTO user (id, name, age) VALUES (?, ?, ?) " +
                            "ON DUPLICATE KEY UPDATE name=?, age=?"
            );
        }

        @Override
        public void invoke(User value, Context context) throws Exception {
            // Insert values.
            statement.setLong(1, value.getId());
            statement.setString(2, value.getName());
            statement.setInt(3, value.getAge());
            // Same values repeated for the ON DUPLICATE KEY UPDATE clause.
            statement.setString(4, value.getName());
            statement.setInt(5, value.getAge());
            statement.executeUpdate();
        }

        @Override
        public void close() throws Exception {
            super.close();
            // Guard so a failure closing the statement cannot leak the
            // connection.
            try {
                if (statement != null) statement.close();
            } finally {
                if (connection != null) connection.close();
            }
        }
    }
}