package com.myflink.day01;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;


/**
 * Streaming WordCount over an unbounded source.
 *
 * <p>Reads lines of text from a socket (an unbounded stream, like Kafka or any
 * message queue), splits them into words, and prints a running count per word.
 *
 * <p>Start a local source first, e.g. {@code nc -lk 4444}, then run this job.
 *
 * @author Shelly An
 * @create 2020/9/15 10:31
 */
public class WordCount_UnBoundedStream {

    public static void main(String[] args) throws Exception {
        // Optional CLI overrides: args[0] = host, args[1] = port.
        // Defaults preserve the original behavior (localhost:4444).
        String host = args.length > 0 ? args[0] : "localhost";
        int port = args.length > 1 ? Integer.parseInt(args[1]) : 4444;

        // 0. Create the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 1. Read an unbounded stream of text lines from the socket.
        DataStreamSource<String> socketDS = env.socketTextStream(host, port);

        // 2.1 Split each line on spaces and emit one (word, 1) tuple per word.
        //     Java lambdas lose generic type information to erasure, so Flink
        //     cannot infer the output type — returns(TypeHint) must supply it.
        SingleOutputStreamOperator<Tuple2<String, Integer>> wordAndOneTuple = socketDS.flatMap(
                (FlatMapFunction<String, Tuple2<String, Integer>>) (line, out) -> {
                    for (String word : line.split(" ")) {
                        out.collect(Tuple2.of(word, 1));
                    }
                }
        ).returns(new TypeHint<Tuple2<String, Integer>>() {});

        // 2.2 Key the stream by the word. Index-based keyBy(0) is deprecated
        //     (and removed in recent Flink versions); a KeySelector lambda is
        //     the supported form and yields a strongly typed String key.
        KeyedStream<Tuple2<String, Integer>, String> wordAndOneKS =
                wordAndOneTuple.keyBy(tuple -> tuple.f0);

        // 2.3 Rolling sum of the count field (tuple index 1) per key.
        SingleOutputStreamOperator<Tuple2<String, Integer>> result = wordAndOneKS.sum(1);

        // 3. Print each updated (word, count) to stdout.
        result.print();

        // 4. Submit the job with an explicit name; blocks until the job ends.
        env.execute("WordCount_UnBoundedStream");
    }

}
