package stateful;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

import java.io.*;
import java.util.HashMap;

/**
 * 自定义状态管理
 */
/**
 * Word-count demo with hand-rolled ("custom") state management: the counting
 * operator ({@code MyMap}) keeps its own per-word state and persists it to
 * local files, while a fixed-delay restart strategy lets the job recover after
 * a simulated failure.
 *
 * <p>Input: whitespace-separated words read from a socket. Any token that
 * contains "error" deliberately throws, to exercise the restart strategy.
 */
public class MyKeyedStateDemo2 {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());

        DataStreamSource<String> lines = env.socketTextStream("hadoop1", 8888);

        // Restart up to 5 times, waiting 20 seconds between attempts.
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(5, 20000));

        lines
                .flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public void flatMap(String line, Collector<Tuple2<String, Integer>> out) throws Exception {
                        // Emit (word, 1) for every token; an "error" token
                        // simulates a failure so the job restarts.
                        for (String word : line.split(" ")) {
                            if (word.contains("error")) {
                                throw new RuntimeException("数据出错!");
                            }
                            out.collect(Tuple2.of(word, 1));
                        }
                    }
                })
                .keyBy(t -> t.f0)
                .map(new MyMap())
                .print();

        env.execute("");
    }
}

/**
 * Accumulates a per-word count in a plain {@link HashMap} and periodically
 * persists it to a local file ({@code ck/<subtask>_task}) as a home-grown
 * checkpoint, so counts survive the fixed-delay restarts configured in main.
 *
 * <p>NOTE(review): this hand-rolled state is a demo; production code should
 * use Flink's managed keyed state (ValueState) plus real checkpointing.
 */
class MyMap extends RichMapFunction<Tuple2<String, Integer>, Tuple2<String, Integer>> {

    // word -> accumulated count. Touched by both the task thread (map) and the
    // checkpoint thread, so every access is synchronized on the map itself.
    // (The original shared it unsynchronized, risking a
    // ConcurrentModificationException while the map was being serialized.)
    private HashMap<String, Integer> myState;

    // Lets close() stop the checkpoint loop; volatile for cross-thread visibility.
    private volatile boolean running = true;
    private Thread checkpointThread;

    @Override
    public void open(Configuration parameters) throws Exception {
        File file = new File("ck/" + getRuntimeContext().getIndexOfThisSubtask() + "_task");
        if (file.exists()) {
            // Restore counts from the previous run. try-with-resources closes the
            // stream even when readObject throws (the original leaked it).
            try (ObjectInputStream in = new ObjectInputStream(new FileInputStream(file))) {
                @SuppressWarnings("unchecked") // file is written by this class with exactly this type
                HashMap<String, Integer> restored = (HashMap<String, Integer>) in.readObject();
                myState = restored;
            }
        } else {
            myState = new HashMap<>();
        }
        // Periodically persist the state. Daemon thread so it cannot keep the
        // JVM alive; it honors the running flag set by close().
        checkpointThread = new Thread(() -> {
            while (running) {
                try {
                    Thread.sleep(10000);
                    // Ensure "ck/" exists; FileOutputStream creates the file
                    // itself, so no createNewFile() is needed.
                    File dir = file.getParentFile();
                    if (dir != null) dir.mkdirs();
                    // Snapshot under the lock, serialize outside it, so map()
                    // is not blocked for the duration of the disk write.
                    HashMap<String, Integer> snapshot;
                    synchronized (myState) {
                        snapshot = new HashMap<>(myState);
                    }
                    System.out.println("checkpoint....");
                    try (ObjectOutputStream out = new ObjectOutputStream(new FileOutputStream(file))) {
                        out.writeObject(snapshot);
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // restore interrupt status and exit
                    return;
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }, "my-state-checkpoint");
        checkpointThread.setDaemon(true);
        checkpointThread.start();
    }

    @Override
    public void close() throws Exception {
        // Stop the checkpoint thread when the task shuts down (the original
        // left it looping forever).
        running = false;
        if (checkpointThread != null) {
            checkpointThread.interrupt();
        }
        super.close();
    }

    /**
     * Adds the incoming count to the word's accumulated total and returns
     * {@code (word, total)}.
     */
    @Override
    public Tuple2<String, Integer> map(Tuple2<String, Integer> value) throws Exception {
        String word = value.f0;
        int currentCount = value.f1;
        synchronized (myState) {
            Integer historyCount = myState.get(word);
            int totalCount = currentCount + (historyCount == null ? 0 : historyCount);
            myState.put(word, totalCount);
            return Tuple2.of(word, totalCount);
        }
    }
}
