package demo.idle;

import com.alibaba.fastjson2.JSONObject;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;

import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Iterator;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

/**
 * Demonstrates the effect of {@link WatermarkStrategy#withIdleness} on event-time windows.
 *
 * <p>When using event time, data skew across Kafka partitions is common: e.g. a topic with
 * three partitions where one partition receives most of the writes and the others get very
 * little data. The watermark only advances to the minimum across all partitions, so the quiet
 * partitions hold it back and windows never fire. Marking the inactive partitions as idle
 * (via {@code withIdleness}) lets the watermark keep advancing without them.
 *
 * <p>Reference: https://www.modb.pro/db/180420
 *
 * @author wangxi created on 2023/7/30 22:38
 * @version v1.0
 */
public class IdlenessJob {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Basic Kafka consumer configuration.
        String bootstrapServers = "127.0.0.1:9092";
        String groupId = "idleness-group";
        Properties kafkaProp = new Properties();
        kafkaProp.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        kafkaProp.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);

        // FIX: diamond operator instead of a raw FlinkKafkaConsumer (was an unchecked warning).
        FlinkKafkaConsumer<Buy> kafkaConsumer = new FlinkKafkaConsumer<>(
                TopicEntity.topic_for_2_partition,
                new KafkaDeserializationSchema<Buy>() {
                    @Override
                    public boolean isEndOfStream(Buy nextElement) {
                        return false; // unbounded stream: never ends
                    }

                    @Override
                    public Buy deserialize(ConsumerRecord<byte[], byte[]> record) throws Exception {
                        // Parse the received record payload into the Buy POJO.
                        // FIX: StandardCharsets.UTF_8 avoids the charset-name lookup and its
                        // checked UnsupportedEncodingException path.
                        String json = new String(record.value(), StandardCharsets.UTF_8);
                        return JSONObject.parseObject(json, Buy.class);
                    }

                    @Override
                    public TypeInformation<Buy> getProducedType() {
                        // FIX: parameterized return type instead of the raw TypeInformation.
                        return TypeInformation.of(Buy.class);
                    }
                },
                kafkaProp);

        kafkaConsumer.setStartFromLatest(); // on every start, consume from the latest Kafka offsets

        // Extract event time and generate watermarks; tolerate up to 2 seconds of out-of-order data.
        // Uncomment withIdleness to mark quiet partitions idle so the watermark can advance.
        FlinkKafkaConsumerBase<Buy> source = kafkaConsumer
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy
                                .<Buy>forBoundedOutOfOrderness(Duration.ofSeconds(2))
//                                .withIdleness(Duration.ofSeconds(5))
                                .withTimestampAssigner((e, t) -> e.ts));

        DataStreamSource<Buy> dataStreamSource = env.addSource(source);

        // Key by user name, then aggregate over 10-second tumbling event-time windows.
        // (FIX: the original comment claimed a 5-second window; the code uses 10 seconds.)
        SingleOutputStreamOperator<Tuple4<String, Long, String, String>> process = dataStreamSource.keyBy(e -> e.name)
                .window(TumblingEventTimeWindows.of(Time.of(10, TimeUnit.SECONDS)))
                .process(new ProcessWindowFunction<Buy, Tuple4<String, Long, String, String>, String, TimeWindow>() {
                    @Override
                    public void process(String key, Context context, Iterable<Buy> elements, Collector<Tuple4<String, Long, String, String>> out) throws Exception {
                        long start = context.window().getStart(); // window start (epoch millis)
                        long end = context.window().getEnd();     // window end (epoch millis)
                        long total = 0L;                          // FIX: primitive long avoids per-element autoboxing
                        StringBuilder names = new StringBuilder(); // FIX: StringBuilder instead of String concat in a loop
                        for (Buy next : elements) {
                            names.append(next.name)
                                 .append(':')
                                 .append(DateFormatUtils.format(next.getTs(), DateFormatUtils.ISO_8601_EXTENDED_TIME_FORMAT.getPattern()))
                                 .append(" - ");
                            total += 1;
                        }
                        out.collect(Tuple4.of(names.toString(), total,
                                DateFormatUtils.format(start, DateFormatUtils.ISO_8601_EXTENDED_TIME_FORMAT.getPattern()),
                                DateFormatUtils.format(end, DateFormatUtils.ISO_8601_EXTENDED_TIME_FORMAT.getPattern())));
                    }
                });

        // Print each window's result to stdout.
        process.print();

        env.execute("Test IdlenessJob");
    }

    /** POJO deserialized from the JSON records in Kafka. */
    public static class Buy implements Serializable {
        private String name;  // user name
        private Long ts;      // business event time (epoch millis)

        public String getName() {
            return name;
        }

        public Buy setName(String name) {
            this.name = name;
            return this;
        }

        public Long getTs() {
            return ts;
        }

        public Buy setTs(Long ts) {
            this.ts = ts;
            return this;
        }
    }
}
