package com.study.wc;

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Unbounded-stream word count: reads a text stream from a socket, splits each
 * line on single spaces into words, and maintains a running count per word,
 * printing updates to stdout.
 *
 * <p>The socket source can be overridden via program arguments
 * ({@code --host <name> --port <n>}); when absent, the hard-coded defaults
 * below are used, preserving the original behavior.
 *
 * @author LiuQun
 * @since 2022/7/24
 */
public class UnBoundedStreamWordCount {

    /** Fallback socket source, used when no --host/--port arguments are supplied. */
    private static final String DEFAULT_HOST = "192.168.200.131";
    private static final int DEFAULT_PORT = 7777;

    public static void main(String[] args) throws Exception {
        // 1. Create the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 2. Resolve host/port from program arguments, falling back to the
        //    defaults (generalizes the previously hard-coded socket address).
        ParameterTool parameterTool = ParameterTool.fromArgs(args);
        String host = parameterTool.get("host", DEFAULT_HOST);
        int port = parameterTool.getInt("port", DEFAULT_PORT);
        DataStreamSource<String> lineDSS = env.socketTextStream(host, port);

        // 3. Transform each line into (word, 1L) tuples.
        SingleOutputStreamOperator<Tuple2<String, Long>> wordAnd1Tuple = lineDSS.flatMap(
                (String line, Collector<Tuple2<String, Long>> out) -> {
                    for (String word : line.split(" ")) {
                        out.collect(Tuple2.of(word, 1L));
                    }
                }
        // Lambdas lose generic type information to erasure, so the result
        // type must be declared explicitly for Flink's serialization.
        ).returns(Types.TUPLE(Types.STRING, Types.LONG));

        // 4. Key the stream by the word itself (tuple field f0).
        KeyedStream<Tuple2<String, Long>, String> wordAnd1KS = wordAnd1Tuple.keyBy(data -> data.f0);

        // 5. Keep a running sum of the count field (positional index 1) per key.
        SingleOutputStreamOperator<Tuple2<String, Long>> sum = wordAnd1KS.sum(1);

        // 6. Print incremental results to stdout.
        sum.print();

        // 7. Submit the job; blocks until the (unbounded) job terminates.
        env.execute();
    }
}
