package day01;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;
import po.WordWithCount;

/**
 * @Description: Reads data from a bounded in-memory source and computes a streaming word count.
 * @Author: ZYX
 * @Date: 2022/2/8 10:05
 * @Version: 1.0
 */
public class Demo02 {

    public static void main(String[] args) throws Exception {
        // Obtain the streaming runtime environment.
        StreamExecutionEnvironment executionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
        // Run with a single parallel task so printed output is deterministic and ordered.
        executionEnvironment.setParallelism(1);

        // Bounded in-memory source: each element is one line of text.
        DataStreamSource<String> stream = executionEnvironment.fromElements("hello world ", "hello world ", "天真", "天真 无邪", "无邪");

        // flatMap: fan each line out into one WordWithCount(word, 1) record per word.
        // Input type: String; output type: WordWithCount.
        SingleOutputStreamOperator<WordWithCount> mappedStream = stream.flatMap(new FlatMapFunction<String, WordWithCount>() {
            @Override
            public void flatMap(String value, Collector<WordWithCount> out) throws Exception {
                // Split on runs of whitespace (after trimming) so that consecutive,
                // leading, or trailing spaces never yield empty-string "words".
                // The original split(" ") would emit "" tokens for input like "a  b".
                String[] arr = value.trim().split("\\s+");
                // Emit each token downstream via the Collector.
                for (String s : arr) {
                    if (!s.isEmpty()) {
                        out.collect(new WordWithCount(s, 1L));
                    }
                }
            }
        });

        // keyBy: logically partition (shuffle) the stream by word so that all
        // records for the same word are processed by the same keyed state.
        KeyedStream<WordWithCount, String> keyedStream = mappedStream
                // First type parameter: element type of the stream.
                // Second type parameter: type of the key.
                .keyBy(new KeySelector<WordWithCount, String>() {
                    @Override
                    public String getKey(WordWithCount wordWithCount) throws Exception {
                        return wordWithCount.word;
                    }
                });

        // reduce: maintains one accumulator per key.
        // - The first record for a key becomes the accumulator and is emitted.
        // - Each subsequent record is merged into the accumulator, which is
        //   re-emitted after every update (a running count, not only a final total).
        // The accumulator has the same type as the stream elements.
        SingleOutputStreamOperator<WordWithCount> result = keyedStream.reduce(new ReduceFunction<WordWithCount>() {
            @Override
            public WordWithCount reduce(WordWithCount value1, WordWithCount value2) throws Exception {
                return new WordWithCount(value1.word, value1.count + value2.count);
            }
        });

        // Print the running counts to stdout.
        result.print();

        // Trigger execution of the lazily-built dataflow graph.
        executionEnvironment.execute();
    }

}
