package com.atguigu.kafka.flink;

import com.atguigu.kafka.flink.model.Person;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.Properties;

public class test {

    /**
     * Entry point: consumes strings from the {@code flink_test} Kafka topic, parses them into
     * {@link Person} records, assigns event-time timestamps taken from each record's own
     * datetime field (no out-of-orderness tolerance), keys the stream by person name, and
     * prints the per-key output of {@code ExtractMinMaxPersonFunction}.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Kafka consumer configuration.
        // NOTE(review): enable.auto.commit=true lets the Kafka client commit offsets on its
        // own schedule; with Flink checkpointing the connector normally manages offsets
        // itself — confirm auto-commit is intended here.
        Properties properties = new Properties();
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "10.254.131.147:9092");
        properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "fink_group1");
        properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        properties.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");

        FlinkKafkaConsumer<String> kafkaConsumer =
                new FlinkKafkaConsumer<>("flink_test", new SimpleStringSchema(), properties);
        // Resume from the committed group offsets; auto.offset.reset applies when none exist.
        kafkaConsumer.setStartFromGroupOffsets();

        DataStream<String> kafkaStream = env.addSource(kafkaConsumer);

        // Parse raw strings into Person records, stamp each with its own event time
        // (zero allowed lateness), then key by name and emit the per-name extremes.
        kafkaStream.flatMap(new PersonFlatMapFunction()).name("格式化数据")
                .assignTimestampsAndWatermarks(WatermarkStrategy.<Person>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                        // ZoneId.systemDefault() — previously called through ZoneOffset, which
                        // merely inherits the static method and obscures the returned type.
                        .withTimestampAssigner((element, recordTimestamp) ->
                                element.getDatetime().atZone(ZoneId.systemDefault()).toInstant().toEpochMilli()))
                .keyBy(Person::getName)
                .process(new ExtractMinMaxPersonFunction()).print();

        env.execute("Kafka Message Processor");
    }

    /**
     * Immutable value object pairing an event timestamp with a grouping key.
     *
     * <p>Instances are fully initialized in the constructor and expose read-only accessors,
     * so the fields are declared {@code final} to make the immutability explicit.
     */
    static class Message {
        // Event time of the message; compared to find the earliest/latest message per group.
        private final LocalDateTime timestamp;
        // Key the stream is partitioned by.
        private final String groupField;

        public Message(LocalDateTime timestamp, String groupField) {
            this.timestamp = timestamp;
            this.groupField = groupField;
        }

        public LocalDateTime getTimestamp() {
            return timestamp;
        }

        public String getGroupField() {
            return groupField;
        }

        @Override
        public String toString() {
            return "Message{timestamp=" + timestamp + ", groupField='" + groupField + "'}";
        }
    }

    /**
     * Keyed process function that tracks, per key, the message with the earliest and the
     * message with the latest timestamp seen so far, and emits both as {@code (key, message)}
     * pairs when an event-time timer fires. State is cleared after each emission.
     */
    static class ExtractMinMaxMessagesFunction extends KeyedProcessFunction<String, Message, Tuple2<String, Message>> {

        // Keyed state: message with the smallest timestamp seen for the current key.
        private ValueState<Message> earliestMessage;
        // Keyed state: message with the largest timestamp seen for the current key.
        private ValueState<Message> latestMessage;

        @Override
        public void open(Configuration parameters) throws Exception {
            earliestMessage = getRuntimeContext().getState(new ValueStateDescriptor<>("earliest-message", TypeInformation.of(Message.class)));
            latestMessage = getRuntimeContext().getState(new ValueStateDescriptor<>("latest-message", TypeInformation.of(Message.class)));
        }

        /**
         * Updates the earliest/latest state with the incoming message and registers an
         * event-time timer one day after the message's own timestamp.
         */
        @Override
        public void processElement(Message message, Context ctx, Collector<Tuple2<String, Message>> out) throws Exception {
            Message currentEarliest = earliestMessage.value();
            Message currentLatest = latestMessage.value();

            if (currentEarliest == null || message.getTimestamp().isBefore(currentEarliest.getTimestamp())) {
                earliestMessage.update(message);
            }

            if (currentLatest == null || message.getTimestamp().isAfter(currentLatest.getTimestamp())) {
                latestMessage.update(message);
            }

            // NOTE(review): every element registers its own timer (its timestamp + 1 day), so a
            // key with many distinct timestamps accumulates many timers; after the first firing
            // clears the state, later firings emit nothing. Confirm a single flush is intended.
            // NOTE(review): the epoch conversion here uses ZoneOffset.UTC while the main
            // pipeline stamps records with the system default zone — verify the zones are
            // meant to differ.
            ctx.timerService().registerEventTimeTimer(message.getTimestamp().plusDays(1).atZone(ZoneOffset.UTC).toInstant().toEpochMilli());
        }

        /**
         * Emits the stored earliest and latest message for the firing key, then clears both
         * states. If only one message was seen for the key, that same message is emitted
         * twice (once as earliest, once as latest).
         */
        @Override
        public void onTimer(long timestamp, OnTimerContext ctx, Collector<Tuple2<String, Message>> out) throws Exception {
            Message earliest = earliestMessage.value();
            Message latest = latestMessage.value();

            if (earliest != null) {
                out.collect(Tuple2.of(ctx.getCurrentKey(), earliest));
            }

            if (latest != null) {
                out.collect(Tuple2.of(ctx.getCurrentKey(), latest));
            }

            earliestMessage.clear();
            latestMessage.clear();
        }
    }
}