package com.atguigu.kafka.flink;

import com.atguigu.kafka.flink.model.AccRes;
import com.atguigu.kafka.flink.model.Person;
import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.PrintSinkFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.time.ZoneOffset;
import java.util.Properties;

public class aggregate {
    // NOTE(review): class name should be UpperCamelCase ("Aggregate") per Java
    // convention, but renaming would break external references — left as-is.

    /** Number of per-key event-time windows to accumulate before emitting one merged result. */
    private static final int WINDOWS_PER_MERGE = 3;

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setRuntimeMode(RuntimeExecutionMode.STREAMING);
        env.setParallelism(1);
        // Checkpoint every 5s; Kafka offsets are committed on checkpoint completion
        // (checkpointing is driven by processing time, not event time).
        env.enableCheckpointing(5000);
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(3600000);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(60000);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);

        // Kafka consumer side.
        FlinkKafkaConsumer<String> kafkaConsumer =
                new FlinkKafkaConsumer<>("flink_test", new SimpleStringSchema(), properties());
        // Alternative: start from the latest offset (log-end-offset); messages already
        // in the topic would not be consumed.
        // kafkaConsumer.setStartFromLatest();
        // Start from the last committed group offset (current-offset); when none exists,
        // auto.offset.reset ("earliest", see properties()) decides the start position.
        kafkaConsumer.setStartFromGroupOffsets();

        DataStream<String> kafkaStream = env.addSource(kafkaConsumer);
        kafkaStream
                .flatMap(new PersonFlatMapFunction()).name("格式化数据")
                // Event-time watermarks with zero tolerated out-of-orderness; the record
                // timestamp is Person.datetime interpreted as UTC.
                // NOTE(review): assumes producers emit UTC timestamps — confirm upstream.
                .assignTimestampsAndWatermarks(WatermarkStrategy.<Person>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                        .withTimestampAssigner((element, recordTimestamp) ->
                                element.getDatetime().atZone(ZoneOffset.UTC).toInstant().toEpochMilli()))
                .keyBy(new PersonKeySelector())
//                .window(TumblingProcessingTimeWindows.of(Time.seconds(2)))
//                .countWindow(11)
                .window(TumblingEventTimeWindows.of(Time.seconds(20)))
                .aggregate(new PersonAggregateFunction(), new MyProcessWindowFunction()).name("聚合数据")
                .addSink(new PrintSinkFunction<>());

        // Launch the job.
        env.execute("flink streaming hello word");
    }

    /**
     * Kafka consumer properties for the source.
     *
     * <p>NOTE(review): enable.auto.commit=true is set here, but with Flink
     * checkpointing enabled the connector commits offsets on checkpoints; verify
     * whether Kafka-side auto-commit is intended, since it can report offsets that
     * do not match the checkpointed state.
     */
    private static Properties properties() {
        Properties props = new Properties();
        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "10.254.131.147:9092");
        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "fink_group1");
//        props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        props.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return props;
    }

    /**
     * Buffers the per-window aggregate of each key in keyed state and, once
     * {@link #WINDOWS_PER_MERGE} windows have fired for that key, emits a single
     * merged {@link AccRes} (weighted average, total count, overall min/max and
     * their difference), then clears the buffer.
     *
     * <p>State is keyed, so windows of different keys are merged independently.
     */
    public static class MyProcessWindowFunction extends ProcessWindowFunction<AccRes, AccRes, String, TimeWindow> {
        // Per-key buffer of the window results seen so far in the current merge cycle.
        private transient ListState<AccRes> windowResults;
        // Per-key count of windows fired in the current merge cycle.
        private transient ValueState<Integer> windowCount;

        @Override
        public void open(Configuration parameters) throws Exception {
            ListStateDescriptor<AccRes> descriptor = new ListStateDescriptor<>(
                    "window-results",
                    AccRes.class
            );
            windowResults = getRuntimeContext().getListState(descriptor);

            ValueStateDescriptor<Integer> countDescriptor = new ValueStateDescriptor<>(
                    "window-count",
                    Integer.class
            );
            windowCount = getRuntimeContext().getState(countDescriptor);
        }

        /**
         * Invoked once per fired window per key; {@code elements} holds the single
         * pre-aggregated result produced by the upstream AggregateFunction.
         */
        @Override
        public void process(String s, Context context, Iterable<AccRes> elements, Collector<AccRes> out) throws Exception {
            Integer count = windowCount.value();
            if (count == null) {
                count = 0;
            }

            // Buffer the current window's result(s).
            for (AccRes result : elements) {
                windowResults.add(result);
            }

            // Advance the window counter for this key.
            count += 1;
            windowCount.update(count);

            // After WINDOWS_PER_MERGE windows, merge the buffered results and emit.
            if (count == WINDOWS_PER_MERGE) {
                AccRes finalResult = new AccRes();
                double totalMoney = 0;
                int totalCount = 0;
                // Seed with extreme values so the first buffered result always wins.
                // (Seeding with 0 — the previous behavior — wrongly reported min=0 for
                // all-positive amounts and max=0 for all-negative amounts.)
                int maxMoney = Integer.MIN_VALUE;
                int minMoney = Integer.MAX_VALUE;

                // Merge all buffered window results; the average is count-weighted.
                for (AccRes result : windowResults.get()) {
                    totalMoney += result.getAvg() * result.getCount();
                    totalCount += result.getCount();
                    maxMoney = Math.max(maxMoney, result.getMaxMoney());
                    minMoney = Math.min(minMoney, result.getMinMoney());
                }

                // Guard against 0/0 -> NaN if every buffered result had count 0.
                finalResult.setAvg(totalCount == 0 ? 0 : totalMoney / totalCount);
                finalResult.setCount(totalCount);
                finalResult.setMaxDiffMoney(maxMoney - minMoney);
                finalResult.setMaxMoney(maxMoney);
                finalResult.setMinMoney(minMoney);

                // Reset this key's buffer for the next merge cycle.
                windowResults.clear();
                windowCount.clear();

                out.collect(finalResult);
            }
        }
    }
}
