import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Streaming word count demo: reads lines of text from a socket, splits them into
 * words, and prints a continuously updated count per word.
 *
 * <p>Run a text server first (e.g. {@code nc -lk 9999}), then start this job.
 */
public class Demo01_wordCount {

    /** Defaults for the socket text source; can be overridden via args[0]/args[1]. */
    private static final String DEFAULT_HOST = "node101";
    private static final int DEFAULT_PORT = 9999;

    /**
     * Builds and executes the streaming word-count pipeline.
     *
     * @param args optional: args[0] = source host (default "node101"),
     *             args[1] = source port (default 9999)
     */
    public static void main(String[] args) {

        // 0. Create the stream execution environment.
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        environment.setParallelism(3);
//        environment.disableOperatorChaining();

        // Allow the socket source location to be overridden from the command line;
        // with no args the behavior is identical to the original hard-coded values.
        String host = args.length > 0 ? args[0] : DEFAULT_HOST;
        int port = args.length > 1 ? Integer.parseInt(args[1]) : DEFAULT_PORT;

        // 1. Source: read lines of text from the socket.
        DataStreamSource<String> dataStreamSource = environment.socketTextStream(host, port);

        // 2.1 Transformation: split each line into individual words.
        // Split on runs of whitespace and drop empty tokens so that leading or
        // consecutive spaces do not produce an empty-string "word".
        DataStream<String> wordsDataStream = dataStreamSource.flatMap(
            new FlatMapFunction<String, String>() {
                @Override
                public void flatMap(String value, Collector<String> out) throws Exception {
                    for (String word : value.split("\\s+")) {
                        if (!word.isEmpty()) {
                            out.collect(word);
                        }
                    }
                }
            }
        );

        // 2.2 Transformation: map each word to a (word, 1) pair — word is the key,
        // 1 is the initial count.
        DataStream<Tuple2<String, Integer>> kvDataStream = wordsDataStream.map(
            new MapFunction<String, Tuple2<String, Integer>>() {
                @Override
                public Tuple2<String, Integer> map(String value) throws Exception {
                    return Tuple2.of(value, 1);
                }
            }
        );

        // NOTE(review): identity map — it does not change the data. Presumably kept
        // to add an extra operator to the job graph for demonstrating operator
        // chaining (see the commented-out disableOperatorChaining() above) — confirm,
        // otherwise it can be removed.
        DataStream<Tuple2<String, Integer>> copyDataStream = kvDataStream.map(
            new MapFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>() {
                @Override
                public Tuple2<String, Integer> map(Tuple2<String, Integer> value) throws Exception {
                    return value;
                }
            }
        );

        // 2.3 Transformation: key the stream by the word (field f0) so subsequent
        // aggregations are computed per word.
        KeyedStream<Tuple2<String, Integer>, String> keyedDataStream = copyDataStream.keyBy(
            new KeySelector<Tuple2<String, Integer>, String>() {
                @Override
                public String getKey(Tuple2<String, Integer> value) throws Exception {
                    return value.f0;
                }
            }
        );

        // 2.4 Transformation: rolling reduce — sum the counts of records that share
        // the same key, emitting the updated running total for each input record.
        DataStream<Tuple2<String, Integer>> resultDataStream = keyedDataStream.reduce(
            new ReduceFunction<Tuple2<String, Integer>>() {
                @Override
                public Tuple2<String, Integer> reduce(Tuple2<String, Integer> value1,
                                                      Tuple2<String, Integer> value2) throws Exception {
                    return Tuple2.of(value1.f0, value1.f1 + value2.f1);
                }
            }
        );

        // 3. Sink: print the running counts to stdout.
        resultDataStream.print();

        try {
            // The pipeline above is only a lazily-built plan; execute() submits it.
            environment.execute("wc");
        } catch (Exception e) {
            // Preserve the cause and add context instead of rethrowing bare.
            throw new RuntimeException("Flink job 'wc' failed", e);
        }
    }

}

