import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

public class Demo01_wordCount_stream {

    /** Default socket-source host, used when no CLI argument is given. */
    private static final String DEFAULT_HOST = "node101";
    /** Default socket-source port, used when no CLI argument is given. */
    private static final int DEFAULT_PORT = 9999;

    /**
     * Streaming word count: reads lines from a TCP socket, splits them on single
     * spaces, and prints a running count per word to stdout.
     *
     * @param args optional overrides: {@code args[0]} = source host,
     *             {@code args[1]} = source port (must parse as an int)
     */
    public static void main(String[] args) {

        // Allow host/port overrides so the job is not tied to one cluster node;
        // with no arguments the behavior is identical to the original demo.
        String host = args.length > 0 ? args[0] : DEFAULT_HOST;
        int port = args.length > 1 ? Integer.parseInt(args[1]) : DEFAULT_PORT;

        // 0. Create the streaming execution environment.
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the printed running totals in a single ordered stream.
        environment.setParallelism(1);

        // 1. Source API: one String element per line received on the socket.
        DataStreamSource<String> dataStreamSource = environment.socketTextStream(host, port);

        /*
         * Some operators need returns(...) to declare the output type explicitly:
         *   1. Java lambdas lose generic type information to erasure, so when the
         *      operator's input and output types differ Flink cannot infer the
         *      output type and job construction would fail or mis-type the stream.
         *   2. The same applies whenever the operator uses generics/type variables.
         */
        dataStreamSource
                // 2. Split each incoming line into individual words.
                .flatMap((String value, Collector<String> out) -> {
                    for (String word : value.split(" ")) {
                        out.collect(word);
                    }
                })
                .returns(Types.STRING)
                // 3. Pair each word with an initial count of 1.
                .map(value -> Tuple2.of(value, 1))
                .returns(Types.TUPLE(Types.STRING, Types.INT))
                // 4. Group the stream by the word itself (tuple field f0).
                .keyBy(value -> value.f0)
                // 5. Sum counts within each key; emits a running total per word.
                .reduce((value1, value2) -> Tuple2.of(value1.f0, value1.f1 + value2.f1))
                // 6. Sink: print each updated (word, count) pair to stdout.
                .print();

        // 7. Submit the job; execute() blocks and declares a checked Exception,
        //    which we rethrow unchecked with context, preserving the cause.
        try {
            environment.execute("wc");
        } catch (Exception e) {
            throw new RuntimeException("Flink streaming job 'wc' failed", e);
        }
    }

}

