package com.test.flink_introduction;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Flink streaming word-count demo. Steps:
 * 1. obtain the Flink execution environment
 * 2. load the data source (bounded or unbounded; e.g. start one with {@code nc -lk <port>}) as a DataStream
 * 3. process the data by invoking operators
 * 4. sink the results
 * 5. execute the job
 */
public class F01Demo01 {

    /**
     * Runs a word-count over an unbounded text stream read from a local socket.
     *
     * Pipeline: socket source -> flatMap (split lines into words)
     * -> map (word -> (word, 1)) -> keyBy (word) -> sum (count) -> print.
     *
     * @param args unused command-line arguments
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {
        System.out.println("enter");

        // 1. Obtain the streaming execution environment.
        StreamExecutionEnvironment see = StreamExecutionEnvironment.getExecutionEnvironment();

        // 2. Load the data source: an unbounded text stream from a local socket
        //    (start the producer with: nc -lk 9001).
        DataStreamSource<String> data = see.socketTextStream("127.0.0.1", 9001);

        // 3a. Split every incoming line on whitespace and emit each word.
        SingleOutputStreamOperator<String> words = data.flatMap(new FlatMapFunction<String, String>() {

            /**
             * @param line one line of input text
             * @param out  collector used to emit each extracted word
             * @throws Exception never thrown here; required by the interface
             */
            @Override
            public void flatMap(String line, Collector<String> out) throws Exception {
                // Emit every whitespace-separated token as its own element.
                String[] arr = line.split("\\s+");
                for (String word : arr) {
                    out.collect(word);
                }
            }
        });

        // 3b. Pair each word with an initial count of 1 so counts can be summed.
        SingleOutputStreamOperator<Tuple2<String, Integer>> wordAndOne = words.map(new MapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public Tuple2<String, Integer> map(String value) throws Exception {
                return Tuple2.of(value, 1);
            }
        });

        // 3c. Group the (word, count) pairs by the word (tuple field f0).
        KeyedStream<Tuple2<String, Integer>, String> keyed = wordAndOne.keyBy(new KeySelector<Tuple2<String, Integer>, String>() {
            @Override
            public String getKey(Tuple2<String, Integer> value) throws Exception {
                return value.f0;
            }
        });

        // 3d. Running per-key sum of the count (tuple position 1);
        //     positional index is the conventional form for Tuple fields.
        SingleOutputStreamOperator<Tuple2<String, Integer>> res = keyed.sum(1);

        // 4. Sink: print the running counts (and show the sink's parallelism first).
        System.out.println(res.getParallelism());
        res.print();

        // 5. Submit the job with an explicit name for easier identification in the UI.
        see.execute("socket-word-count");
    }
}
