package com.shujia.flink.state;

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;

import java.util.HashMap;

public class Demo1NoState {
    /**
     * Word-count demo that deliberately keeps per-word counts in a plain
     * {@link HashMap} instead of Flink managed state, to illustrate why that
     * approach is NOT fault tolerant (see the comment on {@code countMap}).
     *
     * @param args optional overrides: {@code args[0]} = socket host
     *             (default {@code "master"}), {@code args[1]} = socket port
     *             (default {@code 8888})
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        //1. Create the Flink execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        //Read lines from a socket source; start the server first with: nc -lk 8888
        //Host/port may be overridden on the command line for easier local testing.
        String host = args.length > 0 ? args[0] : "master";
        int port = args.length > 1 ? Integer.parseInt(args[1]) : 8888;
        DataStream<String> linesDS = env.socketTextStream(host, port);

        //Partition the stream so every occurrence of a word goes to the same subtask.
        KeyedStream<String, String> keyByDS = linesDS.keyBy(word -> word);

        //Count the occurrences of each word.
        DataStream<Tuple2<String, Integer>> countDS = keyByDS
                .process(new KeyedProcessFunction<String, String, Tuple2<String, Integer>>() {

                    //A single member variable would be shared by every key
                    //processed in this task, so counts would collide:
                    //int count = 0;

                    /*
                     * Per-word counts: word -> running count.
                     *
                     * Problems with this approach:
                     * 1. The HashMap lives only in JVM heap memory — if the task
                     *    fails, all accumulated counts are lost.
                     * 2. After a restart a streaming job cannot replay the full
                     *    history to rebuild the map (e.g. Kafka typically retains
                     *    only ~7 days of data).
                     *
                     * Fault tolerance means: no matter how the task fails, the
                     * results must still be correct after a restart — which
                     * requires Flink managed state, not this map.
                     */
                    HashMap<String, Integer> countMap = new HashMap<>();

                    //processElement is invoked once for every input record.
                    @Override
                    public void processElement(String word,//one input line (treated as a single word)
                                               KeyedProcessFunction<String, String, Tuple2<String, Integer>>.Context ctx, //runtime context (timers, current key, ...)
                                               Collector<Tuple2<String, Integer>> out//emits records downstream
                    ) throws Exception {

                        //Increment the running count for this word in one step
                        //(equivalent to getOrDefault(word, 0) + 1 followed by put).
                        Integer count = countMap.merge(word, 1, Integer::sum);
                        //Send the updated (word, count) pair downstream.
                        out.collect(Tuple2.of(word, count));
                    }
                });

        countDS.print();

        //Submit the job and block until it terminates.
        env.execute();

    }
}
