package net.bwie.realtime.jtp.utils;

import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * Kafka read/write helpers for Flink streaming jobs.
 *
 * <p>All methods target the fixed broker cluster {@link #BOOTSTRAP_SERVERS}.
 * JSON (de)serialization is performed with fastjson.
 */
public class KafkaUtils {

    /** Broker list shared by every consumer and producer (previously duplicated three times). */
    private static final String BOOTSTRAP_SERVERS = "node101:9092,node102:9092,node103:9092";

    /** Utility class — not meant to be instantiated. */
    private KafkaUtils() {
    }

    /**
     * Consumes JSON strings from Kafka (starting from the earliest offsets) and
     * deserializes each record into an instance of the given class.
     *
     * @param env   Flink execution environment
     * @param topic Kafka topic to read from
     * @param clazz target class of each record (e.g. {@code Order.class})
     * @param <T>   record type
     * @return stream of deserialized objects
     */
    public static <T> DataStream<T> consumerKafkaObject(
            StreamExecutionEnvironment env,
            String topic,
            Class<T> clazz) {

        // .returns(clazz) is required: Flink cannot extract the erased generic
        // type T from the lambda and would otherwise fail with InvalidTypesException.
        return consumerKafka(env, topic)
                .map(json -> JSON.parseObject(json, clazz))
                .returns(clazz);
    }

    /**
     * Consumes JSON strings from Kafka (starting from the latest offsets) and
     * deserializes each record into an instance of the given class.
     *
     * @param env   Flink execution environment
     * @param topic Kafka topic to read from
     * @param clazz target class of each record
     * @param <T>   record type
     * @return stream of deserialized objects
     */
    public static <T> DataStream<T> consumerKafkaObjectFromLatest(
            StreamExecutionEnvironment env,
            String topic,
            Class<T> clazz) {

        // Same type-hint requirement as consumerKafkaObject above.
        return consumerKafkaFromLatest(env, topic)
                .map(json -> JSON.parseObject(json, clazz))
                .returns(clazz);
    }

    /**
     * Serializes an object stream to JSON and writes it to the given Kafka topic.
     *
     * @param stream stream of objects to publish
     * @param topic  destination topic
     * @param <T>    object type
     */
    public static <T> void producerKafkaObject(DataStream<T> stream, String topic) {
        // Method reference has a concrete String return type, so no type hint is needed.
        producerKafka(stream.map(JSON::toJSONString), topic);
    }

    /**
     * Consumes a topic as raw strings, starting from the earliest available offsets.
     *
     * @param env   Flink execution environment
     * @param topic Kafka topic to read from
     * @return stream of raw record values
     */
    public static DataStream<String> consumerKafka(StreamExecutionEnvironment env, String topic) {
        return consumeAsStrings(env, topic, "gid-" + topic, OffsetsInitializer.earliest());
    }

    /**
     * Consumes a topic as raw strings, starting from the latest offsets.
     *
     * @param env   Flink execution environment
     * @param topic Kafka topic to read from
     * @return stream of raw record values
     */
    public static DataStream<String> consumerKafkaFromLatest(StreamExecutionEnvironment env, String topic) {
        // NOTE: keeps the historical "g-" prefix (vs "gid-" above) so existing
        // consumer groups keep their committed offsets.
        return consumeAsStrings(env, topic, "g-" + topic, OffsetsInitializer.latest());
    }

    /**
     * Shared implementation for the two string-consumer variants, which previously
     * duplicated the whole builder chain and differed only in group id and offsets.
     */
    private static DataStream<String> consumeAsStrings(
            StreamExecutionEnvironment env,
            String topic,
            String groupId,
            OffsetsInitializer startingOffsets) {

        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers(BOOTSTRAP_SERVERS)
                .setTopics(topic)
                .setGroupId(groupId)
                .setStartingOffsets(startingOffsets)
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();

        return env.fromSource(source, WatermarkStrategy.noWatermarks(), "Kafka Source");
    }

    /**
     * Writes a string stream to the given Kafka topic with exactly-once semantics.
     *
     * @param stream stream of record values (UTF-8 encoded on the wire)
     * @param topic  destination topic
     */
    public static void producerKafka(DataStream<String> stream, String topic) {
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
        // EXACTLY_ONCE uses Kafka transactions; this timeout must not exceed the
        // broker's transaction.max.timeout.ms (default 15 minutes).
        properties.put(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, 15 * 60 * 1000);

        // Anonymous class (not a lambda) so Flink's serialization handling stays predictable.
        KafkaSerializationSchema<String> serializationSchema = new KafkaSerializationSchema<String>() {
            @Override
            public ProducerRecord<byte[], byte[]> serialize(String element, @Nullable Long timestamp) {
                return new ProducerRecord<>(
                        topic, element.getBytes(StandardCharsets.UTF_8)
                );
            }
        };

        // NOTE(review): FlinkKafkaProducer is deprecated in newer Flink releases in
        // favor of KafkaSink; kept here to avoid introducing new imports/packages.
        FlinkKafkaProducer<String> myProducer = new FlinkKafkaProducer<>(
                topic,
                serializationSchema,
                properties,
                FlinkKafkaProducer.Semantic.EXACTLY_ONCE
        );
        stream.addSink(myProducer);
    }
}
