package com.wuwangfu.state;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * @Author: jcshen
 * @Date: 2023-03-03
 * @PackageName: com.wuwangfu.state
 * @ClassName: ValueStated
 * @Description: Word-count example backed by Flink keyed {@link ValueState}.
 * @Version: 1.0.0
 *
 * https://nightlies.apache.org/flink/flink-docs-release-1.14/docs/dev/datastream/fault-tolerance/state/#using-keyed-state
 *
 * After keyBy, per-key K-V style state is called KeyedState.
 */
public class ValueStated {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Checkpoint every 10 seconds so keyed state survives failures.
        env.enableCheckpointing(10000);
        // Restart strategy: up to 3 attempts, 5000 ms apart.
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 5000));
        // Source, e.g. lines such as: hadoop spark flilnk
        DataStreamSource<String> socketLines = env.socketTextStream("localhost", 8888);

        // Transform: split each line into (word, 1) pairs. The word "error"
        // deliberately throws, to demonstrate restart + state recovery.
        SingleOutputStreamOperator<Tuple2<String, Integer>> pairs =
                socketLines.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
                        for (String word : value.split(" ")) {
                            if ("error".equals(word)) {
                                throw new RuntimeException("报错拉");
                            }
                            out.collect(Tuple2.of(word, 1));
                        }
                    }
                });

        // Group by the word itself.
        KeyedStream<Tuple2<String, Integer>, String> keyedStream = pairs.keyBy(tuple -> tuple.f0);

        // Stateful map: keep a running count per key in ValueState.
        keyedStream.map(new RichMapFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>() {

            /**
             * ValueState
             *  ->HeapValueState
             *       public V value()
             *          org.apache.flink.runtime.state.heap.StateTable#get(java.lang.Object)
             *              org.apache.flink.runtime.state.heap.StateTable#get(java.lang.Object, int, java.lang.Object)
             *
             */
            private transient ValueState<Integer> countState;

            @Override
            public void open(Configuration parameters) throws Exception {
                // To use state, first declare a state descriptor (state type + name) ...
                ValueStateDescriptor<Integer> descriptor =
                        new ValueStateDescriptor<>("wc-desc", Integer.class);
                // ... then obtain the handle (initializes new state or restores history).
                countState = getRuntimeContext().getState(descriptor);
            }

            @Override
            public Tuple2<String, Integer> map(Tuple2<String, Integer> record) throws Exception {
                // Previous total for this key; null on the very first record.
                Integer stored = countState.value();
                // Accumulate the incoming count onto the stored total.
                int runningTotal = (stored == null ? 0 : stored) + record.f1;
                // Persist the new total (held in memory until checkpointed).
                countState.update(runningTotal);
                // Emit the tuple carrying the accumulated count.
                record.f1 = runningTotal;
                return record;
            }
        }).print();

        env.execute();
    }
}
