package com.atguigu.day03;

import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

import java.util.HashMap;

/**
 * Streaming word count demo: reads lines from a local socket, splits them
 * into words with a {@link ProcessFunction} (flatMap equivalent), then keeps
 * a running per-word count with a {@link KeyedProcessFunction} (reduce/sum
 * equivalent), printing each updated total.
 */
public class Flink14_Transform_Process {
    public static void main(String[] args) throws Exception {
        // 1. Create the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.setParallelism(1);

        // 2. Read text lines from a local socket (start e.g. `nc -lk 9999`).
        DataStreamSource<String> streamSource = env.socketTextStream("localhost", 9999);

        // 3. Use process() to implement flatMap: split each line on spaces
        //    and emit one (word, 1) tuple per word.
        SingleOutputStreamOperator<Tuple2<String, Integer>> process = streamSource.process(new ProcessFunction<String, Tuple2<String, Integer>>() {
            /**
             * @param value the input line
             * @param ctx   runtime context (timestamps, side outputs)
             * @param out   collector that forwards records downstream
             * @throws Exception propagated to the Flink runtime
             */
            @Override
            public void processElement(String value, Context ctx, Collector<Tuple2<String, Integer>> out) throws Exception {
                for (String word : value.split(" ")) {
                    out.collect(Tuple2.of(word, 1));
                }
            }
        });

        // 4. Group records by word. A KeySelector lambda replaces the
        //    deprecated positional keyBy(0), so the key type is String
        //    instead of the untyped Tuple.
        KeyedStream<Tuple2<String, Integer>, String> keyedStream = process.keyBy(value -> value.f0);

        // 5. Use process() to implement a running count per key.
        SingleOutputStreamOperator<Tuple2<String, Integer>> result = keyedStream.process(new KeyedProcessFunction<String, Tuple2<String, Integer>, Tuple2<String, Integer>>() {

            // Running count per word. NOTE(review): a plain HashMap is held
            // only in operator memory and is not checkpointed — for fault
            // tolerance this should be Flink keyed state (ValueState<Integer>);
            // kept as a map here to match the demo's intent.
            private final HashMap<String, Integer> wordCount = new HashMap<>();

            @Override
            public void processElement(Tuple2<String, Integer> value, Context ctx, Collector<Tuple2<String, Integer>> out) throws Exception {

                System.out.println("处理时间："+ctx.timerService().currentProcessingTime());
                // Increment this word's count (initializing to 1 on first
                // sight) and emit the updated total downstream.
                Integer newCount = wordCount.merge(value.f0, 1, Integer::sum);
                out.collect(Tuple2.of(value.f0, newCount));
            }
        });
        result.print();

        env.execute();
    }
}
