package com.zhang.first.day01;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * @title: word count
 * @author: zhang
 * @date: 2022/1/14 20:54
 */
public class Example2 {
    /**
     * Streaming word count: reads a text file as a stream of lines, splits each
     * line into words, and prints a running (word, count) tally per word.
     *
     * @param args optional; {@code args[0]} overrides the default input file path
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        // Obtain the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 so the printed output arrives as a single ordered stream.
        env.setParallelism(1);

        // Input path: allow overriding via a program argument instead of
        // hard-coding a machine-specific absolute path.
        String inputPath = args.length > 0
                ? args[0]
                : "/Users/apple/IdeaProjects/flink_1.13/src/main/resources/word.txt";

        // Read the input file line by line as a DataStream<String>.
        DataStreamSource<String> streamSource = env.readTextFile(inputPath);

        // flatMap semantics: each input element is transformed into 0, 1, or
        // many output elements — here, one (word, 1) tuple per word in a line.
        // flatMap is a stateless operator.
        SingleOutputStreamOperator<Tuple2<String, Integer>> flatMapDS = streamSource.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
            // First generic parameter: input type; second: output type.
            @Override
            public void flatMap(String in, Collector<Tuple2<String, Integer>> out) throws Exception {
                // Split on runs of whitespace so repeated spaces or tabs do not
                // produce empty-string "words".
                for (String word : in.split("\\s+")) {
                    if (!word.isEmpty()) {
                        out.collect(new Tuple2<>(word, 1));
                    }
                }
            }
        });

        // keyBy: shuffle the tuples so that all tuples sharing the same word
        // land in the same logical partition.
        KeyedStream<Tuple2<String, Integer>, String> keyedStream = flatMapDS.keyBy(new KeySelector<Tuple2<String, Integer>, String>() {
            // First generic parameter: input type; second: key type.
            @Override
            public String getKey(Tuple2<String, Integer> in) throws Exception {
                // f0 is the word (first field of the tuple).
                return in.f0;
            }
        });

        // sum over field "f1" (the count) aggregates per key.
        // sum is a stateful operator.
        SingleOutputStreamOperator<Tuple2<String, Integer>> reducedStream = keyedStream.sum("f1");

        // Print running counts to stdout.
        reducedStream.print();

        // Trigger job execution; nothing runs until this call.
        env.execute("streaming word count");
    }
}
