package com.haoziqi.chapter_01;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Unbounded stream: word count over socket text data.
 *
 * How to test in development mode:
 *   1. On Linux, run {@code nc -lk 9999} to open a netcat server.
 *   2. Run this program.
 *
 * Optional CLI args: args[0] = host (default "hadoop102"), args[1] = port (default 9999).
 */
public class UnBoundedStream {
    public static void main(String[] args) throws Exception {
        // Allow overriding the socket source location from the command line;
        // defaults preserve the original hard-coded behavior.
        String host = args.length > 0 ? args[0] : "hadoop102";
        int port = args.length > 1 ? Integer.parseInt(args[1]) : 9999;

        // 1. Obtain the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 2. Read the unbounded socket text source (one record per line).
        DataStreamSource<String> lines = env.socketTextStream(host, port);

        // 3.1 Flatten: split each line on single spaces and emit each word.
        SingleOutputStreamOperator<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public void flatMap(String line, Collector<String> out) throws Exception {
                for (String word : line.split(" ")) {
                    out.collect(word);
                }
            }
        });

        // 3.2 Map each word to a (word, 1L) pair for counting.
        SingleOutputStreamOperator<Tuple2<String, Long>> pairs = words.map(new MapFunction<String, Tuple2<String, Long>>() {
            @Override
            public Tuple2<String, Long> map(String word) throws Exception {
                return Tuple2.of(word, 1L);
            }
        });

        // 3.3 Key by the word and sum the counts.
        // keyBy(int) on tuple positions is deprecated; use a key selector instead.
        SingleOutputStreamOperator<Tuple2<String, Long>> counts = pairs.keyBy(value -> value.f0).sum(1);

        // 4. Sink the running counts to stdout and start the job.
        counts.print();
        env.execute();
    }
}
