package com.atguigu.day06;

import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

import java.util.HashMap;

public class Flink04_ProcessFun_WordCount {
    /**
     * Streaming word count built entirely from plain {@link ProcessFunction}s,
     * reading whitespace-separated words from a local socket (localhost:9999)
     * and printing a running (word, count) tuple for every input word.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // 1. Obtain the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Parallelism MUST stay 1: the counting function below keeps its state in a
        // plain HashMap (not Flink keyed/managed state), so with more than one
        // subtask the same word could be counted independently in several maps.
        env.setParallelism(1);

        // 2. Read text lines from the socket.
        DataStreamSource<String> streamSource = env.socketTextStream("localhost", 9999);

        // 3. Split every incoming line into individual words.
        SingleOutputStreamOperator<String> wordDStream = streamSource.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String value, ProcessFunction<String, String>.Context ctx, Collector<String> out) throws Exception {
                for (String word : value.split(" ")) {
                    out.collect(word);
                }
            }
        });

        // 4. Wrap each word into a (word, 1) Tuple2.
        SingleOutputStreamOperator<Tuple2<String, Integer>> wordToOneDStream = wordDStream.process(new ProcessFunction<String, Tuple2<String, Integer>>() {
            @Override
            public void processElement(String value, ProcessFunction<String, Tuple2<String, Integer>>.Context ctx, Collector<Tuple2<String, Integer>> out) throws Exception {
                out.collect(Tuple2.of(value, 1));
            }
        });

        // 5. Accumulate a running count per word and print it.
        wordToOneDStream.process(new ProcessFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>() {

            // Accumulator: word -> number of occurrences seen so far.
            // NOTE: operator-local state, not checkpointed by Flink — it is lost on
            // failure/restart and is only correct because parallelism is 1 above.
            private final HashMap<String, Integer> count = new HashMap<>();

            @Override
            public void processElement(Tuple2<String, Integer> value, ProcessFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>.Context ctx, Collector<Tuple2<String, Integer>> out) throws Exception {
                // Fix: add the tuple's own count (value.f1) rather than a hard-coded 1,
                // and replace the containsKey/get/put sequence with Map.merge, which
                // inserts value.f1 for a new word or sums it into the existing count.
                Integer newCount = count.merge(value.f0, value.f1, Integer::sum);
                out.collect(Tuple2.of(value.f0, newCount));
            }
        }).print();

       /* // 5. Use keyBy to route identical words to the same subtask.
        KeyedStream<Tuple2<String, Integer>, Tuple> keyedStream = wordToOneDStream.keyBy(0);

        // 6. Accumulate per key — the scalable alternative to the map-based approach above.
        SingleOutputStreamOperator<Tuple2<String, Integer>> result = keyedStream.process(new KeyedProcessFunction<Tuple, Tuple2<String, Integer>, Tuple2<String, Integer>>() {

//            private Integer count = 0;
            // Accumulator: word -> number of occurrences seen so far.
            private  HashMap<String, Integer> count = new HashMap<>();

            @Override
            public void processElement(Tuple2<String, Integer> value, KeyedProcessFunction<Tuple, Tuple2<String, Integer>, Tuple2<String, Integer>>.Context ctx, Collector<Tuple2<String, Integer>> out) throws Exception {
                // Check whether this word already has an entry in the accumulator.
                if (count.containsKey(value.f0)){
                    // Existing word: fetch the previous total,
                    Integer lastCount = count.get(value.f0);
                    lastCount += 1;
                    // then store the updated total back.
                    count.put(value.f0, lastCount);
                }else {
                    // First time we see this word.
                    count.put(value.f0, 1);
                }

                out.collect(Tuple2.of(value.f0,count.get(value.f0)));
            }
        });

        result.print();*/

        env.execute();
    }
}
