package com.shujia.flink.state;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;

/**
 * Word-count streaming job demonstrating keyed-state TTL.
 *
 * <p>Reads comma-separated lines from the Kafka topic {@code lines}, splits them into
 * words, and keeps a per-word running count in keyed {@link ValueState}. The state is
 * configured with a 5-second time-to-live, so a word whose count has not been written
 * for longer than the TTL restarts from 1 on its next occurrence.
 */
public class Demo5StateTTL {
    public static void main(String[] args) throws Exception {
        // Create the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Build the Kafka source.
        KafkaSource<String> kafkaSource = KafkaSource.<String>builder()
                // Kafka broker list
                .setBootstrapServers("master:9092,node1:9092,node2:9092")
                // topic to consume
                .setTopics("lines")
                // Consumer group id. FIX: the original value "Demo1NoState" was a
                // copy-paste leftover from another demo and would share committed
                // offsets with that job's consumer group; use an id matching this job.
                .setGroupId("Demo5StateTTL")
                // start reading from the latest offsets
                .setStartingOffsets(OffsetsInitializer.latest())
                // values are plain UTF-8 strings
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();

        // Attach the Kafka source; processing-time job, so no watermarks are needed.
        DataStream<String> lines = env
                .fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "kafka source");

        // Split each comma-separated line into individual words.
        // The explicit Types.STRING hint is required because the lambda's
        // generic output type is erased at compile time.
        DataStream<String> words = lines.flatMap((line, collect) -> {
            for (String word : line.split(",")) {
                collect.collect(word);
            }
        }, Types.STRING);

        // Key the stream by the word itself so each word owns its own state entry.
        KeyedStream<String, String> keyBys = words.keyBy(word -> word);

        DataStream<WordCount> wordCounts = keyBys.process(new KeyedProcessFunction<String, String, WordCount>() {

            // Per-key running count; entries expire per the TTL configured in open().
            ValueState<Integer> countState;

            @Override
            public void open(Configuration parameters) throws Exception {

                RuntimeContext context = getRuntimeContext();

                ValueStateDescriptor<Integer> valueStateDescriptor = new ValueStateDescriptor<>("count", Types.INT);

                // TTL configuration: a state entry expires 5 seconds after its last
                // write. Builder defaults apply: the TTL timer refreshes on create
                // and write, and expired values are never returned to callers.
                StateTtlConfig stateTtlConfig = StateTtlConfig
                        .newBuilder(Time.seconds(5))
                        .build();
                // TTL must be attached to the descriptor BEFORE getState() is called.
                valueStateDescriptor.enableTimeToLive(stateTtlConfig);

                countState = context.getState(valueStateDescriptor);
            }

            @Override
            public void processElement(String word, KeyedProcessFunction<String, String, WordCount>.Context ctx, Collector<WordCount> out) throws Exception {
                // value() returns null both for a brand-new key and for a key whose
                // state has expired — either way the count restarts from zero.
                Integer count = countState.value();
                if (count == null) {
                    count = 0;
                }
                count++;
                countState.update(count);

                // Emit the updated (word, count) pair downstream.
                out.collect(new WordCount(word, count));
            }
        });
        wordCounts.print();

        env.execute();

    }

    /**
     * Result record: a word and its current running count.
     *
     * <p>Declared {@code public} with a no-arg constructor (via Lombok) so Flink
     * recognizes it as a POJO instead of silently falling back to the slower
     * Kryo serializer, which the original package-private, no-default-ctor
     * version forced.
     */
    @Data
    @AllArgsConstructor
    @NoArgsConstructor
    public static class WordCount {
        private String word;
        private Integer count;
    }
}
