package cn._51doit.day06;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demonstrates Flink keyed-state programming with {@link ValueState},
 * one of the several kinds of KeyedState Flink provides.
 *
 * <p>Reads "word,count" pairs from a socket, keys by word, and keeps a
 * running total per key inside operator state so it survives failures
 * via checkpointing.
 *
 * @author 今晚打脑斧先森
 * @since 2021-10-22
 */
public class ValueStateDemo {

    public static void main(String[] args) throws Exception {
        // Expose the local web UI on port 8081.
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 8081);
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(conf);

        // Checkpoint every 10s. With checkpointing enabled the default restart
        // strategy retries Integer.MAX_VALUE times (effectively unlimited).
        env.enableCheckpointing(10000);

        DataStreamSource<String> lines = env.socketTextStream("doit01", 8888);

        // Expected input format: "spark,1"
        SingleOutputStreamOperator<Tuple2<String, Integer>> wordAndCount =
                lines.map(new MapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> map(String line) throws Exception {
                        String[] fields = line.split(",");
                        if (fields[0].startsWith("error")) {
                            // Deliberate failure hook so state restoration can be observed.
                            throw new RuntimeException("亲爱的今晚打脑斧先森,数据出错了哦");
                        }
                        return Tuple2.of(fields[0], Integer.parseInt(fields[1]));
                    }
                });

        KeyedStream<Tuple2<String, Integer>, String> keyed = wordAndCount.keyBy(tp -> tp.f0);
        SingleOutputStreamOperator<Tuple2<String, Integer>> summed =
                keyed.map(new ValueStateReduceFunction());

        summed.print();
        env.execute();
    }

    /**
     * Accumulates a running count per key using {@link ValueState}.
     * Because the stream is keyed, the state instance is automatically
     * scoped to the current key — only the value needs to be stored.
     */
    private static class ValueStateReduceFunction
            extends RichMapFunction<Tuple2<String, Integer>, Tuple2<String, Integer>> {

        // Per-key running total; initialized/restored in open().
        private ValueState<Integer> valueState;

        /**
         * Invoked once per subtask before any map() call.
         * Steps to use state:
         * 1. Build a descriptor (name + type). Descriptor names must be unique
         *    within the operator, or different states would collide.
         * 2. Ask the runtime context for the state handle — this restores any
         *    previously checkpointed value, or starts empty on first run.
         */
        @Override
        public void open(Configuration parameters) throws Exception {
            ValueStateDescriptor<Integer> descriptor =
                    new ValueStateDescriptor<>("这个是今晚打脑斧的分区", Integer.class);
            valueState = getRuntimeContext().getState(descriptor);
        }

        @Override
        public Tuple2<String, Integer> map(Tuple2<String, Integer> in) throws Exception {
            // value() reads the state for the key of the current record
            // (key context is handled internally, e.g. see HeapValueState).
            Integer previous = valueState.value();
            int total = (previous == null ? 0 : previous) + in.f1;
            valueState.update(total);
            in.f1 = total;
            return in;
        }
    }
}
