package com.shujia.flink.state;

import lombok.AllArgsConstructor;
import lombok.Data;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.HashMap;
import java.util.Objects;

public class Demo3State {
    /**
     * Word-count job backed by Flink keyed state: reads comma-separated lines
     * from Kafka, splits them into words, and keeps a per-word running count
     * in a {@link ValueState} so the count survives failures via checkpoints.
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers("master:9092,node1:9092,node2:9092") // broker list
                .setTopics("words") // topic to consume
                .setGroupId("Demo5KafkaSource") // consumer group: a record is consumed once per group
                .setStartingOffsets(OffsetsInitializer.latest()) // start from the latest offset
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();

        // Build an unbounded stream from the Kafka source.
        DataStream<String> linesDS = env
                .fromSource(source, WatermarkStrategy.noWatermarks(), "Kafka Source");

        // Split each comma-separated line into single words. The explicit
        // Types.STRING hint is required because the lambda's generic output
        // type is erased at compile time.
        DataStream<String> wordsDS = linesDS
                .flatMap((line, out) -> {
                    for (String word : line.split(",")) {
                        out.collect(word);
                    }
                }, Types.STRING);

        // Group the stream by the word itself.
        KeyedStream<String, String> keyByDS = wordsDS.keyBy(word -> word);

        DataStream<WordCount> countDS = keyByDS
                .map(new RichMapFunction<String, WordCount>() {

                    // Keyed state: unlike a plain Java field or a HashMap held in
                    // the task's JVM heap (which would be lost on failure and is
                    // never checkpointed), ValueState is persisted by Flink's
                    // checkpoints and restored on recovery. A ValueState keeps
                    // exactly one value per key. Declared transient because the
                    // handle is (re)created in open() and must not be serialized
                    // along with the function instance.
                    private transient ValueState<Integer> valueState;

                    // Called once when the task starts; initializes the state handle.
                    @Override
                    public void open(Configuration parameters) throws Exception {
                        RuntimeContext runtimeContext = getRuntimeContext();
                        valueState = runtimeContext.getState(new ValueStateDescriptor<>("count", Integer.class));
                    }

                    // Called once per record.
                    @Override
                    public WordCount map(String word) throws Exception {
                        // 1. Read the current count for this key (null the first
                        //    time a key is seen).
                        Integer count = valueState.value();
                        if (count == null) {
                            count = 0;
                        }
                        // 2. Increment.
                        count += 1;

                        // 3. Write the new count back to state.
                        valueState.update(count);

                        return new WordCount(word, count);
                    }
                });

        countDS.print();

        env.execute();
    }
}

@Data
@AllArgsConstructor
class WordCount {
    private String word;
    private Integer count;
}