package com.lsx143.wordcount.day6;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.FunctionInitializationContext;
import org.apache.flink.runtime.state.FunctionSnapshotContext;
import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Flink streaming job demonstrating operator (non-keyed) state: it counts the
 * lines received from a socket and keeps the running total fault-tolerant by
 * snapshotting it into {@link ListState} on each checkpoint and restoring it
 * on restart via {@link CheckpointedFunction}.
 */
public class Flink_OperatorState {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 20000); // expose the Flink web UI on a fixed port
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);

        env
                .socketTextStream("hadoop162", 9999)
                .map(new MyMapFunction())
                .print();

        // Propagate failures instead of swallowing them: catching the exception
        // and only printing the stack trace would make a failed job exit with
        // status 0, hiding the error from any caller or scheduler.
        env.execute();
    }

    /**
     * Counts every incoming record and returns the running total. The counter
     * lives in a plain field on the hot path and is backed up to operator list
     * state at checkpoints; list state is used so entries can be redistributed
     * among subtasks if the parallelism changes on restore.
     */
    public static class MyMapFunction implements MapFunction<String, Integer>, CheckpointedFunction {

        // Checkpointed backup of the counter (handle acquired in initializeState).
        private ListState<Integer> state;
        // Working counter; only flushed to state when a checkpoint is taken.
        private Integer count = 0;

        @Override
        public Integer map(String value) throws Exception {
            return ++count;
        }

        /**
         * Invoked on every checkpoint: replace the previous snapshot with the
         * current counter value. clear() first, otherwise old values would
         * accumulate in the list across checkpoints.
         */
        @Override
        public void snapshotState(FunctionSnapshotContext context) throws Exception {
            state.clear();
            state.add(count);
        }

        /**
         * Invoked on initial start and on restore from a checkpoint: re-acquire
         * the state handle and, when restoring, rebuild the counter by summing
         * all entries (there may be several after a parallelism change
         * redistributed the list). On a fresh start the state is empty and the
         * loop is a no-op.
         */
        @Override
        public void initializeState(FunctionInitializationContext context) throws Exception {
            state = context
                    .getOperatorStateStore()
                    .getListState(new ListStateDescriptor<>("state", Types.INT));
            for (Integer value : state.get()) {
                count += value;
            }
        }
    }
}