package cn._51doit.flink.day05;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

import java.util.HashMap;
import java.util.Map;

/**
 * A closer look at how Flink implements stateful processing under the hood,
 * using Flink's state programming API.
 *
 * Flink distinguishes two kinds of state:
 *   KeyedState (for streams after keyBy(), i.e. a KeyedStream)
 *       - ValueState: conceptually a Map&lt;KEY, VALUE&gt;
 *       - MapState:   conceptually a Map&lt;KEY, Map&lt;k, v&gt;&gt;
 *       - ListState:  conceptually a Map&lt;KEY, List&lt;E&gt;&gt;
 *   OperatorState (for non-keyed streams, i.e. a plain Stream)
 *
 */
public class ValueStateDemo {

    public static void main(String[] args) throws Exception {

        // Allow overriding the socket source endpoint via program arguments;
        // defaults preserve the original hard-coded behavior (localhost:8888).
        final String host = args.length > 0 ? args[0] : "localhost";
        final int port = args.length > 1 ? Integer.parseInt(args[1]) : 8888;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Checkpointing is left disabled for this demo; uncomment to snapshot
        // state every 10 seconds and enable recovery of the per-key counts.
        //env.enableCheckpointing(10000);

        DataStreamSource<String> lines = env.socketTextStream(host, port);

        // Split each input line into words. Anonymous classes are used (rather
        // than lambdas) so Flink can extract the generic output type without an
        // explicit returns() hint.
        SingleOutputStreamOperator<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public void flatMap(String in, Collector<String> out) throws Exception {
                for (String word : in.split(" ")) {
                    // Deliberately fail on "error" words to demonstrate what
                    // happens to state when a job restarts.
                    if (word.startsWith("error")) {
                        throw new RuntimeException("数据出现了问题！！！");
                    }
                    out.collect(word);
                }
            }
        });

        // Pair every word with an initial count of 1.
        SingleOutputStreamOperator<Tuple2<String, Integer>> wordAndOne = words.map(new MapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public Tuple2<String, Integer> map(String word) throws Exception {
                return Tuple2.of(word, 1);
            }
        });

        // Partition the stream by word so each key gets its own ValueState.
        KeyedStream<Tuple2<String, Integer>, String> keyedStream = wordAndOne.keyBy(t -> t.f0);

        // Use ValueState on the KeyedStream to replicate sum()/reduce()
        // semantics manually: map() keeps a running total per key.
        SingleOutputStreamOperator<Tuple2<String, Integer>> res = keyedStream.map(new RichMapFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>() {

            // Per-key running count. Marked transient because rich functions
            // are serialized and shipped to task managers; the handle is
            // (re)assigned in open() on each parallel instance.
            private transient ValueState<Integer> valueState;

            @Override
            public void open(Configuration parameters) throws Exception {
                // The descriptor names the state and declares its type so
                // Flink can serialize it into checkpoints.
                ValueStateDescriptor<Integer> stateDescriptor = new ValueStateDescriptor<>("count-state", Integer.class);
                // Initialize — or restore after a failure — via the runtime context.
                valueState = getRuntimeContext().getState(stateDescriptor);
            }

            @Override
            public Tuple2<String, Integer> map(Tuple2<String, Integer> tp) throws Exception {
                // value() returns null the first time a key is seen.
                Integer history = valueState.value();
                int sum = (history == null ? 0 : history) + tp.f1;
                // Persist the new running total for this key.
                valueState.update(sum);
                tp.f1 = sum;
                return tp;
            }
        });

        res.print();

        env.execute();
    }

}
