package com.atguigu.wc;

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;


/**
 * Unbounded streaming word count: reads text lines from a socket, splits each
 * line on single spaces, and prints a running count per word.
 *
 * <p>Run against a source such as {@code nc -lk 7777} on the source host.
 */
public class WordCountStreamUnboundedDemo {

    /** Default socket source location; can be overridden via program arguments. */
    private static final String DEFAULT_HOST = "hadoop102";
    private static final int DEFAULT_PORT = 7777;

    /**
     * Entry point.
     *
     * @param args optional overrides: {@code args[0]} = source host,
     *             {@code args[1]} = source port (defaults: hadoop102:7777)
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        // Generalized: host/port may come from the command line; defaults keep
        // the original behavior when no arguments are supplied.
        String host = args.length > 0 ? args[0] : DEFAULT_HOST;
        int port = args.length > 1 ? Integer.parseInt(args[1]) : DEFAULT_PORT;

        // Local environment with Web UI for testing; requires the
        // flink-runtime-web dependency on the classpath.
        // When run in the IDE without an explicit parallelism, the default is
        // the machine's core count.
        // Parallelism precedence: operator-level > env-level > submit-time flag > config file.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        env.setParallelism(3);

        DataStreamSource<String> socketDS = env.socketTextStream(host, port);

        SingleOutputStreamOperator<Tuple2<String, Integer>> sum = socketDS
                .flatMap(
                        (String value, Collector<Tuple2<String, Integer>> out) -> {
                            // Emit (word, 1) for every space-separated token.
                            for (String word : value.split(" ")) {
                                out.collect(Tuple2.of(word, 1));
                            }
                        }
                )
                // Lambdas lose their generic type info to erasure; declare the
                // Tuple2<String, Integer> output type explicitly.
                .returns(Types.TUPLE(Types.STRING, Types.INT))
                .keyBy(value -> value.f0)
                .sum(1);

        sum.print();
        env.execute();
    }
}
