package com.atguigu.flink.chapter07.state;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.FunctionInitializationContext;
import org.apache.flink.runtime.state.FunctionSnapshotContext;
import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

import java.util.ArrayList;
import java.util.List;

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2021/12/17 14:37
 */
/**
 * Demonstrates Flink <em>operator</em> (non-keyed) {@link ListState} via
 * {@link CheckpointedFunction}: every word seen by a subtask is kept in an
 * in-memory list, snapshotted into operator state on each checkpoint, and
 * restored after a failure-triggered restart.
 */
public class Flink01_Operator_ListState {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 20000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(2);
        env.enableCheckpointing(3000);  // take a checkpoint (state snapshot) every 3 seconds

        SingleOutputStreamOperator<String> stream = env
            .socketTextStream("hadoop162", 9999)
            .flatMap(new MyFlatMapFunction());

        stream.print();

        // Deliberately fail on input containing "x" so Flink restarts the job
        // and we can observe state being restored in initializeState().
        stream
            .process(new ProcessFunction<String, String>() {
                @Override
                public void processElement(String value, Context ctx, Collector<String> out) throws Exception {
                    if (value.contains("x")) {
                        throw new RuntimeException("故意抛一个异常, 让flink程序自动重启");
                    }
                }
            })
            .print();

        env.execute();
    }

    /**
     * Accumulates every word seen on this subtask into an in-memory list and
     * mirrors that list into operator {@link ListState} on each checkpoint so
     * it survives job restarts.
     */
    public static class MyFlatMapFunction implements FlatMapFunction<String, String>, CheckpointedFunction {

        // Working copy of the accumulated words; the checkpointed state backs this list.
        // Declared as List (interface) rather than the concrete ArrayList.
        private final List<String> list = new ArrayList<>();
        // Operator list state that persists `list` across checkpoints/restarts.
        private ListState<String> wordState;

        /**
         * Splits the incoming line into words, appends them to the running
         * list, and emits the whole accumulated list as a string.
         */
        @Override
        public void flatMap(String line,
                            Collector<String> out) throws Exception {
            // Split on runs of whitespace so repeated spaces do not yield
            // empty-string tokens (a plain split(" ") would add "" entries).
            for (String word : line.split("\\s+")) {
                if (!word.isEmpty()) {  // guards against a leading-whitespace empty token
                    list.add(word);
                }
            }
            out.collect(list.toString());
        }

        /**
         * Called on every checkpoint, once per parallel subtask: copies the
         * in-memory list into the operator state.
         */
        @Override
        public void snapshotState(FunctionSnapshotContext ctx) throws Exception {
            // update() atomically replaces the state contents with the list.
            wordState.update(list);
        }

        /**
         * Called once per parallel subtask when the job starts or restarts:
         * obtains the operator list state and restores its contents into the
         * in-memory list.
         */
        @Override
        public void initializeState(FunctionInitializationContext ctx) throws Exception {
            System.out.println("MyFlatMapFunction.initializeState");
            // Acquire (or re-acquire after restart) the named operator list state.
            wordState = ctx
                .getOperatorStateStore()
                .getListState(new ListStateDescriptor<String>("wordState", String.class));

            // Restore previously checkpointed words into the working list.
            for (String word : wordState.get()) {
                list.add(word);
            }
        }
    }

}
