package com.shujia.flink.state;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;

import java.util.HashMap;

public class Demo1NoState {
    public static void main(String[] args) throws Exception {
        // Build the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.setParallelism(2);

        // Read a stream of words from a socket, one word per line.
        DataStream<String> lines = env.socketTextStream("master", 8888);

        // Partition the stream by the word itself.
        KeyedStream<String, String> keyedWords = lines.keyBy(w -> w);

        DataStream<Tuple2<String, Integer>> wordCounts = keyedWords
                .process(new KeyedProcessFunction<String, String, Tuple2<String, Integer>>() {

                    /*
                     * DEMO NOTE: this deliberately uses a plain HashMap instead of
                     * Flink managed state. The map lives on the Java heap of each
                     * task, so every key routed to the same task shares it, and its
                     * contents are LOST when the task fails — Flink checkpoints do
                     * not persist ordinary Java collections. A Flink job is meant
                     * to run 24/7 and cannot recompute from scratch, so losing
                     * these interim counts would corrupt the results.
                     *
                     * (A single member variable such as `Integer count` would be
                     * even worse: all keys in the task would share one counter.)
                     */
                    HashMap<String, Integer> counts = new HashMap<>();

                    /**
                     * Invoked once per incoming record.
                     *
                     * @param word one line of input (treated as a single word)
                     * @param ctx  runtime context for the current key and timers
                     * @param out  collector used to emit results downstream
                     */
                    @Override
                    public void processElement(String word,
                                               KeyedProcessFunction<String, String, Tuple2<String, Integer>>.Context ctx,
                                               Collector<Tuple2<String, Integer>> out) throws Exception {
                        // Dump the map *before* the update so the demo shows the
                        // per-task state accumulated so far.
                        System.out.println(counts);
                        // Fetch the running total (0 for a first-seen word),
                        // bump it, write it back, and emit the new count.
                        int updated = counts.getOrDefault(word, 0) + 1;
                        counts.put(word, updated);
                        out.collect(Tuple2.of(word, updated));
                    }
                });

        wordCounts.print();

        env.execute();
    }
}
